Actual source code: damg.c
/*$Id: damg.c,v 1.35 2001/07/20 21:25:12 bsmith Exp $*/

#include "petscda.h"
#include "petscsles.h"
#include "petscmg.h"

/*
    Code for almost fully managing multigrid/multi-level linear solvers for DA grids
*/

/*@C
   DMMGCreate - Creates a DA based multigrid solver object. This allows one to
     easily implement MG methods on regular grids.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the processors that will share the grids and solution process
.  nlevels - number of multigrid levels
-  user - an optional user context

   Output Parameter:
.  dmmg - the context

   Notes:
     To provide a different user context for each level, call DMMGSetUser() after calling
     this routine.

   Level: advanced

.seealso DMMGDestroy()

@*/
int DMMGCreate(MPI_Comm comm,int nlevels,void *user,DMMG **dmmg)
{
  int        ierr,i;
  DMMG       *p;
  PetscTruth galerkin;

  PetscOptionsGetInt(0,"-dmmg_nlevels",&nlevels,PETSC_IGNORE);
  PetscOptionsHasName(0,"-dmmg_galerkin",&galerkin);

  PetscMalloc(nlevels*sizeof(DMMG),&p);
  for (i=0; i<nlevels; i++) {
    PetscNew(struct _p_DMMG,&p[i]);
    PetscMemzero(p[i],sizeof(struct _p_DMMG));
    p[i]->nlevels  = nlevels - i;
    p[i]->comm     = comm;
    p[i]->user     = user;
    p[i]->galerkin = galerkin;
  }
  *dmmg = p;
  return(0);
}

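/*
   Usage sketch (added for illustration; not part of the original source). A typical
   lifetime of the solver object: create the level contexts, configure them with the
   routines below, and free everything with DMMGDestroy() when done.

      DMMG *dmmg;
      int  ierr;

      ierr = DMMGCreate(PETSC_COMM_WORLD,3,PETSC_NULL,&dmmg);CHKERRQ(ierr);
         (configure with DMMGSetDM() and DMMGSetSLES(), then call DMMGSolve())
      ierr = DMMGDestroy(dmmg);CHKERRQ(ierr);

   The number of levels requested here (3) can be overridden at run time with
   -dmmg_nlevels <n>.
*/
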
/*@C
   DMMGSetUseGalerkinCoarse - Causes the DMMG to use R*A_f*R^T to form
     the coarser matrices from the finest grid matrix

   Collective on DMMG

   Input Parameter:
.  dmmg - the context

   Level: advanced

.seealso DMMGCreate()

@*/
int DMMGSetUseGalerkinCoarse(DMMG* dmmg)
{
  int i,nlevels;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");
  nlevels = dmmg[0]->nlevels;

  for (i=0; i<nlevels; i++) {
    dmmg[i]->galerkin = PETSC_TRUE;
  }
  return(0);
}

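/*
   Usage note (added for illustration; not part of the original source). Because
   DMMGSetSLES() reads the galerkin flag when it builds the level matrices, this
   routine (or the equivalent -dmmg_galerkin option) should be invoked before
   DMMGSetSLES():

      ierr = DMMGSetUseGalerkinCoarse(dmmg);CHKERRQ(ierr);
      ierr = DMMGSetSLES(dmmg,ComputeRHS,ComputeMatrix);CHKERRQ(ierr);

   ComputeRHS() and ComputeMatrix() are hypothetical user callbacks; see the
   DMMGSetSLES() example below.
*/
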
/*@C
   DMMGDestroy - Destroys a DA based multigrid solver object.

   Collective on DMMG

   Input Parameter:
.  dmmg - the context

   Level: advanced

.seealso DMMGCreate()

@*/
int DMMGDestroy(DMMG *dmmg)
{
  int ierr,i,nlevels;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");
  nlevels = dmmg[0]->nlevels;

  for (i=1; i<nlevels; i++) {
    if (dmmg[i]->R) {MatDestroy(dmmg[i]->R);}
  }
  for (i=0; i<nlevels; i++) {
    if (dmmg[i]->dm)         {DMDestroy(dmmg[i]->dm);}
    if (dmmg[i]->x)          {VecDestroy(dmmg[i]->x);}
    if (dmmg[i]->b)          {VecDestroy(dmmg[i]->b);}
    if (dmmg[i]->r)          {VecDestroy(dmmg[i]->r);}
    if (dmmg[i]->work1)      {VecDestroy(dmmg[i]->work1);}
    if (dmmg[i]->w)          {VecDestroy(dmmg[i]->w);}
    if (dmmg[i]->work2)      {VecDestroy(dmmg[i]->work2);}
    if (dmmg[i]->lwork1)     {VecDestroy(dmmg[i]->lwork1);}
    if (dmmg[i]->B && dmmg[i]->B != dmmg[i]->J) {MatDestroy(dmmg[i]->B);}
    if (dmmg[i]->J)          {MatDestroy(dmmg[i]->J);}
    if (dmmg[i]->Rscale)     {VecDestroy(dmmg[i]->Rscale);}
    if (dmmg[i]->fdcoloring) {MatFDColoringDestroy(dmmg[i]->fdcoloring);}
    if (dmmg[i]->sles)       {SLESDestroy(dmmg[i]->sles);}
    if (dmmg[i]->snes)       {PetscObjectDestroy((PetscObject)dmmg[i]->snes);}
    if (dmmg[i]->inject)     {VecScatterDestroy(dmmg[i]->inject);}
    PetscFree(dmmg[i]);
  }
  PetscFree(dmmg);
  return(0);
}

/*@C
   DMMGSetDM - Sets the coarse grid information for the grids

   Collective on DMMG

   Input Parameters:
+  dmmg - the context
-  dm - the DA or VecPack object

   Level: advanced

.seealso DMMGCreate(), DMMGDestroy()

@*/
int DMMGSetDM(DMMG *dmmg,DM dm)
{
  int ierr,i,nlevels;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");
  nlevels = dmmg[0]->nlevels;

  /* Create DA data structure for all the levels */
  dmmg[0]->dm = dm;
  PetscObjectReference((PetscObject)dm);
  for (i=1; i<nlevels; i++) {
    DMRefine(dmmg[i-1]->dm,dmmg[i]->comm,&dmmg[i]->dm);
  }
  DMMGSetUp(dmmg);
  return(0);
}

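/*
   Usage sketch (added for illustration; not part of the original source). The DM
   passed in defines the coarsest grid; DMMGSetDM() refines it nlevels-1 times and
   keeps its own reference, so the caller may destroy its handle afterwards. The
   DACreate2d() arguments below are only an example coarse grid.

      DA  da;
      int ierr;

      ierr = DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_STAR,5,5,
                        PETSC_DECIDE,PETSC_DECIDE,1,1,PETSC_NULL,PETSC_NULL,&da);CHKERRQ(ierr);
      ierr = DMMGSetDM(dmmg,(DM)da);CHKERRQ(ierr);
      ierr = DADestroy(da);CHKERRQ(ierr);
*/
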
/*@C
   DMMGSetUp - Prepares the DMMG to solve a system

   Collective on DMMG

   Input Parameter:
.  dmmg - the context

   Level: advanced

.seealso DMMGCreate(), DMMGDestroy(), DMMG, DMMGSetSNES(), DMMGSetSLES(), DMMGSolve()

@*/
int DMMGSetUp(DMMG *dmmg)
{
  int ierr,i,nlevels = dmmg[0]->nlevels;

  /* Create work vectors for each level */
  for (i=0; i<nlevels; i++) {
    DMCreateGlobalVector(dmmg[i]->dm,&dmmg[i]->x);
    VecDuplicate(dmmg[i]->x,&dmmg[i]->b);
    VecDuplicate(dmmg[i]->x,&dmmg[i]->r);
  }

  /* Create interpolation/restriction between levels */
  for (i=1; i<nlevels; i++) {
    DMGetInterpolation(dmmg[i-1]->dm,dmmg[i]->dm,&dmmg[i]->R,PETSC_NULL);
  }

  return(0);
}

/*@C
   DMMGSolve - Actually solves the (non)linear system defined with the DMMG

   Collective on DMMG

   Input Parameter:
.  dmmg - the context

   Level: advanced

   Notes: For linear (SLES) problems this may be called more than once; it reuses the same
     matrices but recomputes the right hand side for each new solve. Call DMMGSetSLES()
     to generate new matrices.

.seealso DMMGCreate(), DMMGDestroy(), DMMG, DMMGSetSNES(), DMMGSetSLES(), DMMGSetUp()

@*/
int DMMGSolve(DMMG *dmmg)
{
  int        i,ierr,nlevels = dmmg[0]->nlevels;
  PetscTruth gridseq,vecmonitor,flg;
  KSP        ksp;

  PetscOptionsHasName(0,"-dmmg_grid_sequence",&gridseq);
  PetscOptionsHasName(0,"-dmmg_vecmonitor",&vecmonitor);
  if (gridseq) {
    if (dmmg[0]->initialguess) {
      (*dmmg[0]->initialguess)(dmmg[0]->snes,dmmg[0]->x,dmmg[0]);
      if (dmmg[0]->sles) {
        SLESGetKSP(dmmg[0]->sles,&ksp);
        KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);
      }
    }
    for (i=0; i<nlevels-1; i++) {
      (*dmmg[i]->solve)(dmmg,i);
      if (vecmonitor) {
        VecView(dmmg[i]->x,PETSC_VIEWER_DRAW_(dmmg[i]->comm));
      }
      MatInterpolate(dmmg[i+1]->R,dmmg[i]->x,dmmg[i+1]->x);
      if (dmmg[i+1]->sles) {
        SLESGetKSP(dmmg[i+1]->sles,&ksp);
        KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);
      }
    }
  } else {
    if (dmmg[nlevels-1]->initialguess) {
      (*dmmg[nlevels-1]->initialguess)(dmmg[nlevels-1]->snes,dmmg[nlevels-1]->x,dmmg[nlevels-1]);
      if (dmmg[nlevels-1]->sles) {
        SLESGetKSP(dmmg[nlevels-1]->sles,&ksp);
        KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);
      }
    }
  }
  (*DMMGGetFine(dmmg)->solve)(dmmg,nlevels-1);
  if (vecmonitor) {
    VecView(dmmg[nlevels-1]->x,PETSC_VIEWER_DRAW_(dmmg[nlevels-1]->comm));
  }

  PetscOptionsHasName(PETSC_NULL,"-dmmg_view",&flg);
  if (flg && !PetscPreLoadingOn) {
    DMMGView(dmmg,PETSC_VIEWER_STDOUT_(dmmg[0]->comm));
  }
  return(0);
}

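/*
   Usage sketch (added for illustration; not part of the original source). For a
   linear problem set up with DMMGSetSLES(), DMMGSolve() can be called repeatedly;
   each call recomputes the right hand side but reuses the matrices. The finest
   grid solution is available as DMMGGetFine(dmmg)->x.

      ierr = DMMGSolve(dmmg);CHKERRQ(ierr);
      ierr = VecView(DMMGGetFine(dmmg)->x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

   Grid sequencing, graphical monitoring of the level solutions, and viewing of the
   solver can be requested on the command line with -dmmg_grid_sequence,
   -dmmg_vecmonitor, and -dmmg_view respectively.
*/
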
int DMMGSolveSLES(DMMG *dmmg,int level)
{
  int ierr,its;

  (*dmmg[level]->rhs)(dmmg[level],dmmg[level]->b);
  if (dmmg[level]->matricesset) {
    SLESSetOperators(dmmg[level]->sles,dmmg[level]->J,dmmg[level]->J,SAME_NONZERO_PATTERN);
    dmmg[level]->matricesset = PETSC_FALSE;
  }
  SLESSolve(dmmg[level]->sles,dmmg[level]->b,dmmg[level]->x,&its);
  return(0);
}

/*
    Sets each of the linear solvers to use multigrid
*/
int DMMGSetUpLevel(DMMG *dmmg,SLES sles,int nlevels)
{
  int         ierr,i;
  PC          pc;
  PetscTruth  ismg,monitor,ismf,isshell,ismffd;
  SLES        lsles; /* solver internal to the multigrid preconditioner */
  MPI_Comm    *comms,comm;
  PetscViewer ascii;
  KSP         ksp;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");

  PetscOptionsHasName(PETSC_NULL,"-dmmg_ksp_monitor",&monitor);
  if (monitor) {
    SLESGetKSP(sles,&ksp);
    PetscObjectGetComm((PetscObject)ksp,&comm);
    PetscViewerASCIIOpen(comm,"stdout",&ascii);
    PetscViewerASCIISetTab(ascii,1+dmmg[0]->nlevels-nlevels);
    KSPSetMonitor(ksp,KSPDefaultMonitor,ascii,(int(*)(void*))PetscViewerDestroy);
  }

  /* use fgmres on outer iteration by default */
  SLESGetKSP(sles,&ksp);
  KSPSetType(ksp,KSPFGMRES);

  SLESGetPC(sles,&pc);
  PCSetType(pc,PCMG);
  PetscMalloc(nlevels*sizeof(MPI_Comm),&comms);
  for (i=0; i<nlevels; i++) {
    comms[i] = dmmg[i]->comm;
  }
  MGSetLevels(pc,nlevels,comms);
  PetscFree(comms);
  MGSetType(pc,MGFULL);

  PetscTypeCompare((PetscObject)pc,PCMG,&ismg);
  if (ismg) {

    /* set solvers for each level */
    for (i=0; i<nlevels; i++) {
      MGGetSmoother(pc,i,&lsles);
      SLESSetOperators(lsles,dmmg[i]->J,dmmg[i]->B,DIFFERENT_NONZERO_PATTERN);
      MGSetX(pc,i,dmmg[i]->x);
      MGSetRhs(pc,i,dmmg[i]->b);
      MGSetR(pc,i,dmmg[i]->r);
      MGSetResidual(pc,i,MGDefaultResidual,dmmg[i]->J);
      if (monitor) {
        SLESGetKSP(lsles,&ksp);
        PetscObjectGetComm((PetscObject)ksp,&comm);
        PetscViewerASCIIOpen(comm,"stdout",&ascii);
        PetscViewerASCIISetTab(ascii,1+dmmg[0]->nlevels-i);
        KSPSetMonitor(ksp,KSPDefaultMonitor,ascii,(int(*)(void*))PetscViewerDestroy);
      }
      /* If we are using a matrix-free multiply and no explicit matrix was provided
         to build the preconditioner, then no preconditioner can be used
      */
      PetscTypeCompare((PetscObject)dmmg[i]->B,MATSHELL,&isshell);
      PetscTypeCompare((PetscObject)dmmg[i]->B,MATDAAD,&ismf);
      PetscTypeCompare((PetscObject)dmmg[i]->B,MATMFFD,&ismffd);
      if (isshell || ismf || ismffd) {
        PC lpc;
        SLESGetPC(lsles,&lpc);
        PCSetType(lpc,PCNONE);
      }
    }

    /* Set interpolation/restriction between levels */
    for (i=1; i<nlevels; i++) {
      MGSetInterpolate(pc,i,dmmg[i]->R);
      MGSetRestriction(pc,i,dmmg[i]->R);
    }
  }
  return(0);
}

extern int MatSeqAIJPtAP(Mat,Mat,Mat*);

/*@C
   DMMGSetSLES - Sets the linear solver object that will use the grid hierarchy

   Collective on DMMG

   Input Parameters:
+  dmmg - the context
.  func - function to compute the linear system matrix on each grid level
-  rhs - function to compute the right hand side on each level (need only work on the
     finest grid if you do not use grid sequencing)

   Level: advanced

   Notes: For linear problems this may be called more than once; it reevaluates the
     matrices each time it is called. Call DMMGSolve() directly several times to solve
     with the same matrix but different right hand sides.

.seealso DMMGCreate(), DMMGDestroy(), DMMGSetDM(), DMMGSolve()

@*/
int DMMGSetSLES(DMMG *dmmg,int (*rhs)(DMMG,Vec),int (*func)(DMMG,Mat))
{
  int        ierr,size,i,nlevels;
  PetscTruth galerkin;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");
  nlevels  = dmmg[0]->nlevels;
  galerkin = dmmg[0]->galerkin;

  if (galerkin) {
    MPI_Comm_size(dmmg[nlevels-1]->comm,&size);
    DMGetMatrix(dmmg[nlevels-1]->dm,MATAIJ,&dmmg[nlevels-1]->B);
    (*func)(dmmg[nlevels-1],dmmg[nlevels-1]->B);
    for (i=nlevels-2; i>-1; i--) {
      MatSeqAIJPtAP(dmmg[i+1]->B,dmmg[i+1]->R,&dmmg[i]->B);
    }
  }

  if (!dmmg[0]->sles) {
    /* create solvers for each level */
    for (i=0; i<nlevels; i++) {

      if (!dmmg[i]->B && !galerkin) {
        MPI_Comm_size(dmmg[i]->comm,&size);
        DMGetMatrix(dmmg[i]->dm,MATAIJ,&dmmg[i]->B);
      }
      if (!dmmg[i]->J) {
        dmmg[i]->J = dmmg[i]->B;
      }

      SLESCreate(dmmg[i]->comm,&dmmg[i]->sles);
      DMMGSetUpLevel(dmmg,dmmg[i]->sles,i+1);
      SLESSetFromOptions(dmmg[i]->sles);
      dmmg[i]->solve = DMMGSolveSLES;
      dmmg[i]->rhs   = rhs;
    }
  }

  /* evaluate the matrix on each level */
  for (i=0; i<nlevels; i++) {
    if (!galerkin) {
      (*func)(dmmg[i],dmmg[i]->J);
    }
    dmmg[i]->matricesset = PETSC_TRUE;
  }

  for (i=0; i<nlevels-1; i++) {
    SLESSetOptionsPrefix(dmmg[i]->sles,"dmmg_");
  }

  return(0);
}

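/*
   Usage sketch (added for illustration; not part of the original source). ComputeRHS()
   and ComputeMatrix() are hypothetical user callbacks matching the rhs and func
   prototypes of DMMGSetSLES(); the matrix routine is evaluated on every level (only on
   the finest level when Galerkin coarsening is used), while the right hand side routine
   is needed on the finest level, and on coarser levels only with grid sequencing.

      extern int ComputeRHS(DMMG,Vec);
      extern int ComputeMatrix(DMMG,Mat);

      ierr = DMMGSetSLES(dmmg,ComputeRHS,ComputeMatrix);CHKERRQ(ierr);
      ierr = DMMGSolve(dmmg);CHKERRQ(ierr);
*/
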
/*@C
   DMMGView - Prints information on a DA based multi-level preconditioner

   Collective on DMMG and PetscViewer

   Input Parameters:
+  dmmg - the context
-  viewer - the viewer

   Level: advanced

.seealso DMMGCreate(), DMMGDestroy()

@*/
int DMMGView(DMMG *dmmg,PetscViewer viewer)
{
  int        ierr,i,nlevels,flag;
  MPI_Comm   comm;
  PetscTruth isascii;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");
  nlevels = dmmg[0]->nlevels;

  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_compare(comm,dmmg[0]->comm,&flag);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) {
    SETERRQ(PETSC_ERR_ARG_NOTSAMECOMM,"Different communicators in the DMMG and the PetscViewer");
  }

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
  if (isascii) {
    PetscViewerASCIIPrintf(viewer,"DMMG Object with %d levels\n",nlevels);
  }
  for (i=0; i<nlevels; i++) {
    PetscViewerASCIIPushTab(viewer);
    DMView(dmmg[i]->dm,viewer);
    PetscViewerASCIIPopTab(viewer);
  }
  if (isascii) {
    PetscViewerASCIIPrintf(viewer,"%s Object on finest level\n",dmmg[nlevels-1]->sles ? "SLES" : "SNES");
    if (dmmg[nlevels-1]->galerkin) {
      PetscViewerASCIIPrintf(viewer,"Using Galerkin R^T*A*R process to compute coarser matrices\n");
    }
  }
  if (dmmg[nlevels-1]->sles) {
    SLESView(dmmg[nlevels-1]->sles,viewer);
  } else {
    /* use of PetscObjectView() means we do not have to link with libpetscsnes if SNES is not being used */
    PetscObjectView((PetscObject)dmmg[nlevels-1]->snes,viewer);
  }
  return(0);
}