Actual source code: damg.c

/*$Id: damg.c,v 1.35 2001/07/20 21:25:12 bsmith Exp $*/

 #include "petscda.h"
 #include "petscsles.h"
 #include "petscmg.h"

/*
   Code for almost fully managing multigrid/multi-level linear solvers for DA grids
*/

/*@C
    DMMGCreate - Creates a DA based multigrid solver object. This allows one to
      easily implement MG methods on regular grids.

    Collective on MPI_Comm

    Input Parameters:
+   comm - the processors that will share the grids and solution process
.   nlevels - number of multigrid levels
-   user - an optional user context

    Output Parameter:
.   dmmg - the context

    Notes:
      To provide a different user context for each level, call DMMGSetUser() after calling
      this routine.

    Level: advanced

.seealso DMMGDestroy()

@*/
int DMMGCreate(MPI_Comm comm,int nlevels,void *user,DMMG **dmmg)
{
  int        ierr,i;
  DMMG       *p;

  PetscOptionsGetInt(0,"-dmmg_nlevels",&nlevels,PETSC_IGNORE);

  PetscMalloc(nlevels*sizeof(DMMG),&p);
  for (i=0; i<nlevels; i++) {
    ierr             = PetscNew(struct _p_DMMG,&p[i]);
    ierr             = PetscMemzero(p[i],sizeof(struct _p_DMMG));
    p[i]->nlevels    = nlevels - i;
    p[i]->comm       = comm;
    p[i]->user       = user;
  }
  *dmmg = p;
  return(0);
}
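
/*
   A minimal lifecycle sketch for the routines in this file (not part of damg.c;
   the DACreate2d() grid sizes and the callbacks ComputeRHS()/ComputeMatrix()
   are illustrative assumptions, with the signatures DMMGSetSLES() expects):

     DMMG *dmmg;
     DA   da;

     DMMGCreate(PETSC_COMM_WORLD,3,PETSC_NULL,&dmmg);
     DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_STAR,5,5,
                PETSC_DECIDE,PETSC_DECIDE,1,1,PETSC_NULL,PETSC_NULL,&da);
     DMMGSetDM(dmmg,(DM)da);
     DADestroy(da);                  (DMMGSetDM() keeps its own reference)
     DMMGSetSLES(dmmg,ComputeRHS,ComputeMatrix);
     DMMGSolve(dmmg);
     DMMGDestroy(dmmg);
*/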


/*@C
    DMMGDestroy - Destroys a DA based multigrid solver object.

    Collective on DMMG

    Input Parameter:
.   dmmg - the context

    Level: advanced

.seealso DMMGCreate()

@*/
int DMMGDestroy(DMMG *dmmg)
{
  int ierr,i,nlevels;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");
  nlevels = dmmg[0]->nlevels;

  for (i=1; i<nlevels; i++) {
    if (dmmg[i]->R) {MatDestroy(dmmg[i]->R);}
  }
  for (i=0; i<nlevels; i++) {
    if (dmmg[i]->dm)         {DMDestroy(dmmg[i]->dm);}
    if (dmmg[i]->x)          {VecDestroy(dmmg[i]->x);}
    if (dmmg[i]->b)          {VecDestroy(dmmg[i]->b);}
    if (dmmg[i]->r)          {VecDestroy(dmmg[i]->r);}
    if (dmmg[i]->work1)      {VecDestroy(dmmg[i]->work1);}
    if (dmmg[i]->w)          {VecDestroy(dmmg[i]->w);}
    if (dmmg[i]->work2)      {VecDestroy(dmmg[i]->work2);}
    if (dmmg[i]->lwork1)     {VecDestroy(dmmg[i]->lwork1);}
    if (dmmg[i]->B && dmmg[i]->B != dmmg[i]->J) {MatDestroy(dmmg[i]->B);}
    if (dmmg[i]->J)          {MatDestroy(dmmg[i]->J);}
    if (dmmg[i]->Rscale)     {VecDestroy(dmmg[i]->Rscale);}
    if (dmmg[i]->fdcoloring) {MatFDColoringDestroy(dmmg[i]->fdcoloring);}
    if (dmmg[i]->sles)       {SLESDestroy(dmmg[i]->sles);}
    if (dmmg[i]->snes)       {PetscObjectDestroy((PetscObject)dmmg[i]->snes);}
    PetscFree(dmmg[i]);
  }
  PetscFree(dmmg);
  return(0);
}

/*@C
    DMMGSetDM - Sets the coarse grid information for the grids

    Collective on DMMG

    Input Parameters:
+   dmmg - the context
-   dm - the DA or VecPack object

    Level: advanced

.seealso DMMGCreate(), DMMGDestroy()

@*/
int DMMGSetDM(DMMG *dmmg,DM dm)
{
  int ierr,i,nlevels;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");
  nlevels = dmmg[0]->nlevels;

  /* Create DA data structure for all the levels */
  dmmg[0]->dm = dm;
  PetscObjectReference((PetscObject)dm);
  for (i=1; i<nlevels; i++) {
    DMRefine(dmmg[i-1]->dm,dmmg[i]->comm,&dmmg[i]->dm);
  }
  DMMGSetUp(dmmg);
  return(0);
}
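
/*
   Example of the hierarchy this builds (an assumption based on the default DA
   refinement ratio of 2): a 17x17 coarse DA passed in with nlevels == 3 is
   refined by DMRefine() into 33x33 and 65x65 grids, since each refinement maps
   M points to 2*M-1 on a non-periodic DA. Level 0 thus holds the coarsest grid
   and level nlevels-1 the finest.
*/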

/*@C
    DMMGSetUp - Prepares the DMMG to solve a system

    Collective on DMMG

    Input Parameter:
.   dmmg - the context

    Level: advanced

.seealso DMMGCreate(), DMMGDestroy(), DMMG, DMMGSetSNES(), DMMGSetSLES(), DMMGSolve()

@*/
int DMMGSetUp(DMMG *dmmg)
{
  int ierr,i,nlevels = dmmg[0]->nlevels;

  /* Create work vectors and matrix for each level */
  for (i=0; i<nlevels; i++) {
    DMCreateGlobalVector(dmmg[i]->dm,&dmmg[i]->x);
    VecDuplicate(dmmg[i]->x,&dmmg[i]->b);
    VecDuplicate(dmmg[i]->x,&dmmg[i]->r);
  }

  /* Create interpolation/restriction between levels */
  for (i=1; i<nlevels; i++) {
    DMGetInterpolation(dmmg[i-1]->dm,dmmg[i]->dm,&dmmg[i]->R,PETSC_NULL);
  }

  return(0);
}

/*@C
    DMMGSolve - Actually solves the (non)linear system defined with the DMMG

    Collective on DMMG

    Input Parameter:
.   dmmg - the context

    Level: advanced

    Notes: For linear (SLES) problems this may be called more than once; it reuses the same
    matrices but recomputes the right hand side for each new solve. Call DMMGSetSLES()
    to generate new matrices.

.seealso DMMGCreate(), DMMGDestroy(), DMMG, DMMGSetSNES(), DMMGSetSLES(), DMMGSetUp()

@*/
int DMMGSolve(DMMG *dmmg)
{
  int        i,ierr,nlevels = dmmg[0]->nlevels;
  PetscTruth gridseq,vecmonitor,flg;
  KSP        ksp;

  PetscOptionsHasName(0,"-dmmg_grid_sequence",&gridseq);
  PetscOptionsHasName(0,"-dmmg_vecmonitor",&vecmonitor);
  if (gridseq) {
    if (dmmg[0]->initialguess) {
      (*dmmg[0]->initialguess)(dmmg[0]->snes,dmmg[0]->x,dmmg[0]);
      if (dmmg[0]->sles) {
        SLESGetKSP(dmmg[0]->sles,&ksp);
        KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);
      }
    }
    for (i=0; i<nlevels-1; i++) {
      (*dmmg[i]->solve)(dmmg,i);
      if (vecmonitor) {
        VecView(dmmg[i]->x,PETSC_VIEWER_DRAW_(dmmg[i]->comm));
      }
      MatInterpolate(dmmg[i+1]->R,dmmg[i]->x,dmmg[i+1]->x);
      if (dmmg[i+1]->sles) {
        SLESGetKSP(dmmg[i+1]->sles,&ksp);
        KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);
      }
    }
  } else {
    if (dmmg[nlevels-1]->initialguess) {
      (*dmmg[nlevels-1]->initialguess)(dmmg[nlevels-1]->snes,dmmg[nlevels-1]->x,dmmg[nlevels-1]);
      if (dmmg[nlevels-1]->sles) {
        SLESGetKSP(dmmg[nlevels-1]->sles,&ksp);
        KSPSetInitialGuessNonzero(ksp,PETSC_TRUE);
      }
    }
  }
  (*DMMGGetFine(dmmg)->solve)(dmmg,nlevels-1);
  if (vecmonitor) {
     VecView(dmmg[nlevels-1]->x,PETSC_VIEWER_DRAW_(dmmg[nlevels-1]->comm));
  }

  PetscOptionsHasName(PETSC_NULL,"-dmmg_view",&flg);
  if (flg && !PetscPreLoadingOn) {
    DMMGView(dmmg,PETSC_VIEWER_STDOUT_(dmmg[0]->comm));
  }
  return(0);
}
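
/*
   The runtime options consulted in this file can be combined on the command
   line; for example (the executable name ex1 is hypothetical):

     mpirun -np 4 ex1 -dmmg_nlevels 5 -dmmg_grid_sequence -dmmg_vecmonitor \
            -dmmg_ksp_monitor -dmmg_view
*/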

int DMMGSolveSLES(DMMG *dmmg,int level)
{
  int ierr,its;

  (*dmmg[level]->rhs)(dmmg[level],dmmg[level]->b);
  if (dmmg[level]->matricesset) {
    SLESSetOperators(dmmg[level]->sles,dmmg[level]->J,dmmg[level]->J,SAME_NONZERO_PATTERN);
    dmmg[level]->matricesset = PETSC_FALSE;
  }
  SLESSolve(dmmg[level]->sles,dmmg[level]->b,dmmg[level]->x,&its);
  return(0);
}
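
/*
   Because matricesset is cleared after the first solve, repeated calls to
   DMMGSolve() recompute only the right hand side and reuse the existing
   operators (and hence the existing preconditioner); sketch:

     DMMGSetSLES(dmmg,ComputeRHS,ComputeMatrix);   evaluates the matrices
     DMMGSolve(dmmg);                              sets operators, solves
     DMMGSolve(dmmg);                              new rhs only, solves again
*/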

/*
    Sets each of the linear solvers to use multigrid
*/
int DMMGSetUpLevel(DMMG *dmmg,SLES sles,int nlevels)
{
  int         ierr,i;
  PC          pc;
  PetscTruth  ismg,monitor,ismf,isshell,ismffd;
  SLES        lsles; /* solver internal to the multigrid preconditioner */
  MPI_Comm    *comms,comm;
  PetscViewer ascii;
  KSP         ksp;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");

  PetscOptionsHasName(PETSC_NULL,"-dmmg_ksp_monitor",&monitor);
  if (monitor) {
    SLESGetKSP(sles,&ksp);
    PetscObjectGetComm((PetscObject)ksp,&comm);
    PetscViewerASCIIOpen(comm,"stdout",&ascii);
    PetscViewerASCIISetTab(ascii,1+dmmg[0]->nlevels-nlevels);
    KSPSetMonitor(ksp,KSPDefaultMonitor,ascii,(int(*)(void*))PetscViewerDestroy);
  }
  /* use fgmres on outer iteration by default */
  SLESGetKSP(sles,&ksp);
  KSPSetType(ksp,KSPFGMRES);

  ierr  = SLESGetPC(sles,&pc);
  ierr  = PCSetType(pc,PCMG);
  ierr  = PetscMalloc(nlevels*sizeof(MPI_Comm),&comms);
  for (i=0; i<nlevels; i++) {
    comms[i] = dmmg[i]->comm;
  }
  ierr  = MGSetLevels(pc,nlevels,comms);
  ierr  = PetscFree(comms);
  ierr  = MGSetType(pc,MGFULL);

  PetscTypeCompare((PetscObject)pc,PCMG,&ismg);
  if (ismg) {

    /* set solvers for each level */
    for (i=0; i<nlevels; i++) {
      MGGetSmoother(pc,i,&lsles);
      SLESSetOperators(lsles,dmmg[i]->J,dmmg[i]->B,DIFFERENT_NONZERO_PATTERN);
      MGSetX(pc,i,dmmg[i]->x);
      MGSetRhs(pc,i,dmmg[i]->b);
      MGSetR(pc,i,dmmg[i]->r);
      MGSetResidual(pc,i,MGDefaultResidual,dmmg[i]->J);
      if (monitor) {
        SLESGetKSP(lsles,&ksp);
        PetscObjectGetComm((PetscObject)ksp,&comm);
        PetscViewerASCIIOpen(comm,"stdout",&ascii);
        PetscViewerASCIISetTab(ascii,1+dmmg[0]->nlevels-i);
        KSPSetMonitor(ksp,KSPDefaultMonitor,ascii,(int(*)(void*))PetscViewerDestroy);
      }
      /* If using a matrix-free multiply and no explicit matrix was provided to
         build the preconditioner, then no preconditioner can be used
      */
      PetscTypeCompare((PetscObject)dmmg[i]->B,MATSHELL,&isshell);
      PetscTypeCompare((PetscObject)dmmg[i]->B,MATDAAD,&ismf);
      PetscTypeCompare((PetscObject)dmmg[i]->B,MATMFFD,&ismffd);
      if (isshell || ismf || ismffd) {
        PC lpc;
        SLESGetPC(lsles,&lpc);
        PCSetType(lpc,PCNONE);
      }
    }

    /* Set interpolation/restriction between levels */
    for (i=1; i<nlevels; i++) {
      MGSetInterpolate(pc,i,dmmg[i]->R);
      MGSetRestriction(pc,i,dmmg[i]->R);
    }
  }
  return(0);
}

/*@C
    DMMGSetSLES - Sets the linear solver object that will use the grid hierarchy

    Collective on DMMG

    Input Parameters:
+   dmmg - the context
.   rhs - function to compute right hand side on each level (need only work on the finest grid
          if you do not use grid sequencing)
-   func - function to compute linear system matrix on each grid level

    Level: advanced

    Notes: For linear problems this may be called more than once; it reevaluates the matrices
       on each call. Call DMMGSolve() directly several times to solve with the same matrix but
       different right hand sides.

.seealso DMMGCreate(), DMMGDestroy(), DMMGSetDM(), DMMGSolve()

@*/
int DMMGSetSLES(DMMG *dmmg,int (*rhs)(DMMG,Vec),int (*func)(DMMG,Mat))
{
  int ierr,i,nlevels;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");
  nlevels = dmmg[0]->nlevels;

  if (!dmmg[0]->sles) {
    /* create solvers for each level */
    for (i=0; i<nlevels; i++) {

      if (!dmmg[i]->B) {
        DMGetMatrix(dmmg[i]->dm,MATMPIAIJ,&dmmg[i]->B);
      }
      if (!dmmg[i]->J) {
        dmmg[i]->J = dmmg[i]->B;
      }

      SLESCreate(dmmg[i]->comm,&dmmg[i]->sles);
      DMMGSetUpLevel(dmmg,dmmg[i]->sles,i+1);
      SLESSetFromOptions(dmmg[i]->sles);
      dmmg[i]->solve = DMMGSolveSLES;
      dmmg[i]->rhs   = rhs;
    }
  }

  /* evaluate matrix on each level */
  for (i=0; i<nlevels; i++) {
    (*func)(dmmg[i],dmmg[i]->J);
    dmmg[i]->matricesset = PETSC_TRUE;
  }

  for (i=0; i<nlevels-1; i++) {
    SLESSetOptionsPrefix(dmmg[i]->sles,"dmmg_");
  }

  return(0);
}
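
/*
   A sketch of the callbacks DMMGSetSLES() expects (the names and bodies are
   hypothetical; only the signatures come from the prototype above):

     int ComputeRHS(DMMG dmmg,Vec b)
     {
       PetscScalar one = 1.0;
       VecSet(&one,b);             right hand side for this grid level
       return 0;
     }

     int ComputeMatrix(DMMG dmmg,Mat J)
     {
       ... assemble J using the grid information in dmmg->dm ...
       MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);
       MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);
       return 0;
     }

     DMMGSetSLES(dmmg,ComputeRHS,ComputeMatrix);
*/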

/*@C
    DMMGView - Prints information on a DA based multi-level preconditioner

    Collective on DMMG and PetscViewer

    Input Parameters:
+   dmmg - the context
-   viewer - the viewer

    Level: advanced

.seealso DMMGCreate(), DMMGDestroy()

@*/
int DMMGView(DMMG *dmmg,PetscViewer viewer)
{
  int        ierr,i,nlevels,flag;
  MPI_Comm   comm;
  PetscTruth isascii;

  if (!dmmg) SETERRQ(1,"Passing null as DMMG");
  nlevels = dmmg[0]->nlevels;

  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_compare(comm,dmmg[0]->comm,&flag);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) {
    SETERRQ(PETSC_ERR_ARG_NOTSAMECOMM,"Different communicators in the DMMG and the PetscViewer");
  }

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
  if (isascii) {
    PetscViewerASCIIPrintf(viewer,"DMMG Object with %d levels\n",nlevels);
  }
  for (i=0; i<nlevels; i++) {
    PetscViewerASCIIPushTab(viewer);
    DMView(dmmg[i]->dm,viewer);
    PetscViewerASCIIPopTab(viewer);
  }
  if (isascii) {
    PetscViewerASCIIPrintf(viewer,"%s Object on finest level\n",dmmg[nlevels-1]->sles ? "SLES" : "SNES");
  }
  if (dmmg[nlevels-1]->sles) {
    SLESView(dmmg[nlevels-1]->sles,viewer);
  } else {
    /* use of PetscObjectView() means we do not have to link with libpetscsnes if SNES is not being used */
    PetscObjectView((PetscObject)dmmg[nlevels-1]->snes,viewer);
  }
  return(0);
}