Actual source code: da2.c

  1: #define PETSCDM_DLL
  2: 
 3:  #include "src/dm/da/daimpl.h"

  7: /*@C
  8:       DAGetElements - Gets an array containing the indices (in local coordinates) 
  9:                  of all the local elements

 11:     Not Collective

 13:    Input Parameter:
 14: .     da - the DA object

 16:    Output Parameters:
 17: +     n - number of local elements
 18: -     e - the indices of the elements' vertices

 20:    Level: intermediate

 22: .seealso: DAElementType, DASetElementType(), DARestoreElements()
 23: @*/
 24: PetscErrorCode  DAGetElements(DA da,PetscInt *n,const PetscInt *e[])
 25: {
 29:   (da->ops->getelements)(da,n,e);
 30:   return(0);
 31: }

 35: /*@C
 36:       DARestoreElements - Restores an array containing the indices (in local coordinates)
 37:                  of all the local elements obtained with DAGetElements()

 39:     Not Collective

 41:    Input Parameters:
 42: +     da - the DA object
 43: .     n - number of local elements
 44: -     e - the indices of the elements' vertices

 46:    Level: intermediate

 48: .seealso: DAElementType, DASetElementType(), DAGetElements()
 49: @*/
 50: PetscErrorCode  DARestoreElements(DA da,PetscInt *n,const PetscInt *e[])
 51: {
 55:   if (da->ops->restoreelements) {
 56:     (da->ops->restoreelements)(da,n,e);
 57:   }
 58:   return(0);
 59: }
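
A minimal usage sketch (illustrative only, not part of da2.c): assuming a DA da with the default DA_ELEMENT_P1 element type, each element carries three vertex indices, so a loop over the local elements looks like

  PetscErrorCode ierr;
  PetscInt       ne,i;
  const PetscInt *e;

  ierr = DAGetElements(da,&ne,&e);CHKERRQ(ierr);
  for (i=0; i<ne; i++) {
    /* e[3*i], e[3*i+1], e[3*i+2] are the element's vertex indices in the local (ghosted) numbering */
  }
  ierr = DARestoreElements(da,&ne,&e);CHKERRQ(ierr);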

 63: /*@C
 64:       DAGetOwnershipRange - Gets the ranges of indices in the x, y and z directions that are owned by each process

 66:     Not Collective

 68:    Input Parameter:
 69: .     da - the DA object

 71:    Output Parameters:
 72: +     lx - ownership along x direction (optional)
 73: .     ly - ownership along y direction (optional)
 74: -     lz - ownership along z direction (optional)

 76:    Level: intermediate

 78:     Note: these correspond to the optional final arguments passed to DACreate(), DACreate2d(), DACreate3d()

 80:     In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; their required
 81:     lengths are given by the sixth, seventh and eighth arguments returned by DAGetInfo()

 83: .seealso: DAGetCorners(), DAGetGhostCorners(), DACreate(), DACreate1d(), DACreate2d(), DACreate3d()
 84: @*/
 85: PetscErrorCode  DAGetOwnershipRange(DA da,PetscInt **lx,PetscInt **ly,PetscInt **lz)
 86: {
 89:   if (lx) *lx = da->lx;
 90:   if (ly) *ly = da->ly;
 91:   if (lz) *lz = da->lz;
 92:   return(0);
 93: }
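
For illustration (a sketch assuming the DAGetInfo() calling sequence of this release), the ownership arrays can be queried as follows; the arrays belong to the DA and must not be freed:

  PetscErrorCode ierr;
  PetscInt       *lx,*ly,m,n;

  ierr = DAGetInfo(da,0,0,0,0,&m,&n,0,0,0,0,0);CHKERRQ(ierr);      /* m,n = process grid */
  ierr = DAGetOwnershipRange(da,&lx,&ly,PETSC_NULL);CHKERRQ(ierr);
  /* lx[0..m-1] and ly[0..n-1] hold the number of nodes owned along each direction */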

 97: PetscErrorCode DAView_2d(DA da,PetscViewer viewer)
 98: {
100:   PetscMPIInt    rank;
101:   PetscTruth     iascii,isdraw;

104:   MPI_Comm_rank(((PetscObject)da)->comm,&rank);

106:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
107:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
108:   if (iascii) {
109:     PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D N %D m %D n %D w %D s %D\n",rank,da->M,
110:                              da->N,da->m,da->n,da->w,da->s);
111:     PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D, Y range of indices: %D %D\n",da->xs,da->xe,da->ys,da->ye);
112:     PetscViewerFlush(viewer);
113:   } else if (isdraw) {
114:     PetscDraw       draw;
115:     double     ymin = -1*da->s-1,ymax = da->N+da->s;
116:     double     xmin = -1*da->s-1,xmax = da->M+da->s;
117:     double     x,y;
118:     PetscInt   base,*idx;
119:     char       node[10];
120:     PetscTruth isnull;
121: 
122:     PetscViewerDrawGetDraw(viewer,0,&draw);
123:     PetscDrawIsNull(draw,&isnull); if (isnull) return(0);
124:     if (!da->coordinates) {
125:       PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
126:     }
127:     PetscDrawSynchronizedClear(draw);

129:     /* first processor draws all node lines */
130:     if (!rank) {
131:       ymin = 0.0; ymax = da->N - 1;
132:       for (xmin=0; xmin<da->M; xmin++) {
133:         PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_BLACK);
134:       }
135:       xmin = 0.0; xmax = da->M - 1;
136:       for (ymin=0; ymin<da->N; ymin++) {
137:         PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
138:       }
139:     }
140:     PetscDrawSynchronizedFlush(draw);
141:     PetscDrawPause(draw);

143:     /* draw my box */
144:     ymin = da->ys; ymax = da->ye - 1; xmin = da->xs/da->w;
145:     xmax =(da->xe-1)/da->w;
146:     PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
147:     PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
148:     PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
149:     PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);

151:     /* put in numbers */
152:     base = (da->base)/da->w;
153:     for (y=ymin; y<=ymax; y++) {
154:       for (x=xmin; x<=xmax; x++) {
155:         sprintf(node,"%d",(int)base++);
156:         PetscDrawString(draw,x,y,PETSC_DRAW_BLACK,node);
157:       }
158:     }

160:     PetscDrawSynchronizedFlush(draw);
161:     PetscDrawPause(draw);
162:     /* overlay ghost numbers, useful for error checking */
163:     /* put in numbers */

165:     base = 0; idx = da->idx;
166:     ymin = da->Ys; ymax = da->Ye; xmin = da->Xs; xmax = da->Xe;
167:     for (y=ymin; y<ymax; y++) {
168:       for (x=xmin; x<xmax; x++) {
169:         if ((base % da->w) == 0) {
170:           sprintf(node,"%d",(int)(idx[base]/da->w));
171:           PetscDrawString(draw,x/da->w,y,PETSC_DRAW_BLUE,node);
172:         }
173:         base++;
174:       }
175:     }
176:     PetscDrawSynchronizedFlush(draw);
177:     PetscDrawPause(draw);
178:   } else {
179:     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for DA2d",((PetscObject)viewer)->type_name);
180:   }
181:   return(0);
182: }

184: #if 0
187: PetscErrorCode DAPublish_Petsc(PetscObject obj)
188: {
190:   return(0);
191: }
192: #endif

196: PetscErrorCode DAGetElements_2d_P1(DA da,PetscInt *n,const PetscInt *e[])
197: {
199:   PetscInt       i,j,cnt,xs,xe = da->xe,ys,ye = da->ye,Xs = da->Xs, Xe = da->Xe, Ys = da->Ys;

202:   if (!da->e) {
203:     if (da->xs == Xs) xs = da->xs; else xs = da->xs - 1;
204:     if (da->ys == Ys) ys = da->ys; else ys = da->ys - 1;
205:     da->ne = 2*(xe - xs - 1)*(ye - ys - 1);
206:     PetscMalloc((1 + 3*da->ne)*sizeof(PetscInt),&da->e);
207:     cnt    = 0;
208:     for (j=ys; j<ye-1; j++) {
209:       for (i=xs; i<xe-1; i++) {
210:         da->e[cnt]   = i - Xs + (j - Ys)*(Xe - Xs);
211:         da->e[cnt+1] = i - Xs + 1 + (j - Ys)*(Xe - Xs);
212:         da->e[cnt+2] = i - Xs + (j - Ys + 1)*(Xe - Xs);

214:         da->e[cnt+3] = i - Xs + 1 + (j - Ys + 1)*(Xe - Xs);
215:         da->e[cnt+4] = i - Xs + (j - Ys + 1)*(Xe - Xs);
216:         da->e[cnt+5] = i - Xs + 1 + (j - Ys)*(Xe - Xs);
217:         cnt += 6;
218:       }
219:     }
220:   }
221:   *n = da->ne;
222:   *e = da->e;
223:   return(0);
224: }
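
Note on the element layout computed above: each grid cell is split into two P1 triangles, a lower-left triangle with vertices (i,j), (i+1,j), (i,j+1) and an upper-right triangle with vertices (i+1,j+1), (i,j+1), (i+1,j), all stored in the local (ghosted) numbering of row width Xe-Xs.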


229: /*@C
230:    DACreate2d - Creates an object that will manage the communication of two-dimensional
231:    regular array data that is distributed across some processors.

233:    Collective on MPI_Comm

235:    Input Parameters:
236: +  comm - MPI communicator
237: .  wrap - type of periodicity the array should have.
238:          Use one of DA_NONPERIODIC, DA_XPERIODIC, DA_YPERIODIC, or DA_XYPERIODIC.
239: .  stencil_type - stencil type.  Use either DA_STENCIL_BOX or DA_STENCIL_STAR.
240: .  M,N - global dimension in each direction of the array (use -M and/or -N to indicate that it may be set to a different value
241:             from the command line with -da_grid_x <M> -da_grid_y <N>)
242: .  m,n - corresponding number of processors in each dimension
243:          (or PETSC_DECIDE to have it calculated)
244: .  dof - number of degrees of freedom per node
245: .  s - stencil width
246: -  lx, ly - arrays containing the number of nodes in each cell along
247:            the x and y coordinates, or PETSC_NULL. If non-null, these
248:            must have lengths m and n respectively, and the corresponding
249:            m and n cannot be PETSC_DECIDE. The sum of the lx[] entries
250:            must be M, and the sum of the ly[] entries must be N.

252:    Output Parameter:
253: .  inra - the resulting distributed array object

255:    Options Database Keys:
256: +  -da_view - Calls DAView() at the conclusion of DACreate2d()
257: .  -da_grid_x <nx> - number of grid points in x direction, if M < 0
258: .  -da_grid_y <ny> - number of grid points in y direction, if N < 0
259: .  -da_processors_x <nx> - number of processors in x direction
260: .  -da_processors_y <ny> - number of processors in y direction
261: .  -da_refine_x - refinement ratio in x direction
262: -  -da_refine_y - refinement ratio in y direction

264:    Level: beginner

266:    Notes:
267:    The stencil type DA_STENCIL_STAR with width 1 corresponds to the 
268:    standard 5-pt stencil, while DA_STENCIL_BOX with width 1 denotes
269:    the standard 9-pt stencil.

271:    The array data itself is NOT stored in the DA; it is stored in Vec objects.
272:    The appropriate vector objects can be obtained with calls to DACreateGlobalVector()
273:    and DACreateLocalVector(), and with calls to VecDuplicate() if more are needed.

275: .keywords: distributed array, create, two-dimensional

277: .seealso: DADestroy(), DAView(), DACreate1d(), DACreate3d(), DAGlobalToLocalBegin(), DAGetRefinementFactor(),
278:           DAGlobalToLocalEnd(), DALocalToGlobal(), DALocalToLocalBegin(), DALocalToLocalEnd(), DASetRefinementFactor(),
279:           DAGetInfo(), DACreateGlobalVector(), DACreateLocalVector(), DACreateNaturalVector(), DALoad(), DAView(), DAGetOwnershipRange()

281: @*/
282: PetscErrorCode  DACreate2d(MPI_Comm comm,DAPeriodicType wrap,DAStencilType stencil_type,
283:                           PetscInt M,PetscInt N,PetscInt m,PetscInt n,PetscInt dof,PetscInt s,PetscInt *lx,PetscInt *ly,DA *inra)
284: {
286:   PetscMPIInt    rank,size;
287:   PetscInt       xs,xe,ys,ye,x,y,Xs,Xe,Ys,Ye,start,end;
288:   PetscInt       up,down,left,i,n0,n1,n2,n3,n5,n6,n7,n8,*idx,nn;
289:   PetscInt       xbase,*bases,*ldims,j,x_t,y_t,s_t,base,count;
290:   PetscInt       s_x,s_y; /* s proportionalized to w */
291:   PetscInt       *flx = 0,*fly = 0;
292:   PetscInt       sn0 = 0,sn2 = 0,sn6 = 0,sn8 = 0,refine_x = 2, refine_y = 2,tM = M,tN = N;
293:   DA             da;
294:   Vec            local,global;
295:   VecScatter     ltog,gtol;
296:   IS             to,from;

300:   *inra = 0;
301: #ifndef PETSC_USE_DYNAMIC_LIBRARIES
302:   DMInitializePackage(PETSC_NULL);
303: #endif

305:   if (dof < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof);
306:   if (s < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s);

308:   PetscOptionsBegin(comm,PETSC_NULL,"2d DA Options","DA");
309:     if (M < 0){
310:       tM = -M;
311:       PetscOptionsInt("-da_grid_x","Number of grid points in x direction","DACreate2d",tM,&tM,PETSC_NULL);
312:     }
313:     if (N < 0){
314:       tN = -N;
315:       PetscOptionsInt("-da_grid_y","Number of grid points in y direction","DACreate2d",tN,&tN,PETSC_NULL);
316:     }
317:     PetscOptionsInt("-da_processors_x","Number of processors in x direction","DACreate2d",m,&m,PETSC_NULL);
318:     PetscOptionsInt("-da_processors_y","Number of processors in y direction","DACreate2d",n,&n,PETSC_NULL);
319:     PetscOptionsInt("-da_refine_x","Refinement ratio in x direction","DASetRefinementFactor",refine_x,&refine_x,PETSC_NULL);
320:     PetscOptionsInt("-da_refine_y","Refinement ratio in y direction","DASetRefinementFactor",refine_y,&refine_y,PETSC_NULL);
321:   PetscOptionsEnd();
322:   M = tM; N = tN;

324:   PetscHeaderCreate(da,_p_DA,struct _DAOps,DA_COOKIE,0,"DA",comm,DADestroy,DAView);
325:   da->ops->createglobalvector = DACreateGlobalVector;
326:   da->ops->globaltolocalbegin = DAGlobalToLocalBegin;
327:   da->ops->globaltolocalend   = DAGlobalToLocalEnd;
328:   da->ops->localtoglobal      = DALocalToGlobal;
329:   da->ops->getinterpolation   = DAGetInterpolation;
330:   da->ops->getcoloring        = DAGetColoring;
331:   da->ops->getmatrix          = DAGetMatrix;
332:   da->ops->refine             = DARefine;
333:   da->ops->coarsen            = DACoarsen;
334:   da->ops->getinjection       = DAGetInjection;
335:   da->ops->getaggregates      = DAGetAggregates;
336:   da->ops->getelements        = DAGetElements_2d_P1;
337:   da->elementtype             = DA_ELEMENT_P1;

339:   da->dim        = 2;
340:   da->interptype = DA_Q1;
341:   da->refine_x   = refine_x;
342:   da->refine_y   = refine_y;
343:   PetscMalloc(dof*sizeof(char*),&da->fieldname);
344:   PetscMemzero(da->fieldname,dof*sizeof(char*));

346:   MPI_Comm_size(comm,&size);
347:   MPI_Comm_rank(comm,&rank);

349:   if (m != PETSC_DECIDE) {
350:     if (m < 1) {SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in X direction: %D",m);}
351:     else if (m > size) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in X direction: %D %d",m,size);}
352:   }
353:   if (n != PETSC_DECIDE) {
354:     if (n < 1) {SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in Y direction: %D",n);}
355:     else if (n > size) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in Y direction: %D %d",n,size);}
356:   }

358:   if (m == PETSC_DECIDE || n == PETSC_DECIDE) {
359:     if (n != PETSC_DECIDE) {
360:       m = size/n;
361:     } else if (m != PETSC_DECIDE) {
362:       n = size/m;
363:     } else {
364:       /* try for squarish distribution */
365:       m = (PetscInt)(0.5 + sqrt(((double)M)*((double)size)/((double)N)));
366:       if (!m) m = 1;
367:       while (m > 0) {
368:         n = size/m;
369:         if (m*n == size) break;
370:         m--;
371:       }
372:       if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;}
373:     }
374:     if (m*n != size) SETERRQ(PETSC_ERR_PLIB,"Unable to create partition, check the size of the communicator and input m and n ");
375:   } else if (m*n != size) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Given Bad partition");

377:   if (M < m) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Partition in x direction is too fine! %D %D",M,m);
378:   if (N < n) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Partition in y direction is too fine! %D %D",N,n);

380:   /* 
381:      Determine locally owned region 
382:      xs is the first local node number, x is the number of local nodes 
383:   */
384:   if (!lx) { /* user did not set distribution; use the default */
385:     PetscMalloc(m*sizeof(PetscInt),&lx);
386:     flx = lx;
387:     for (i=0; i<m; i++) {
388:       lx[i] = M/m + ((M % m) > i);
389:     }
390:   }
391:   x  = lx[rank % m];
392:   xs = 0;
393:   for (i=0; i<(rank % m); i++) {
394:     xs += lx[i];
395:   }
396: #if defined(PETSC_USE_DEBUG)
397:   left = xs;
398:   for (i=(rank % m); i<m; i++) {
399:     left += lx[i];
400:   }
401:   if (left != M) {
402:     SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M: %D %D",left,M);
403:   }
404: #endif

406:   /* 
407:      Determine locally owned region 
408:      ys is the first local node number, y is the number of local nodes 
409:   */
410:   if (!ly) { /* user did not set distribution; use the default */
411:     PetscMalloc(n*sizeof(PetscInt),&ly);
412:     fly  = ly;
413:     for (i=0; i<n; i++) {
414:       ly[i] = N/n + ((N % n) > i);
415:     }
416:   }
417:   y  = ly[rank/m];
418:   ys = 0;
419:   for (i=0; i<(rank/m); i++) {
420:     ys += ly[i];
421:   }
422: #if defined(PETSC_USE_DEBUG)
423:   left = ys;
424:   for (i=(rank/m); i<n; i++) {
425:     left += ly[i];
426:   }
427:   if (left != N) {
428:     SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Sum of ly across processors not equal to N: %D %D",left,N);
429:   }
430: #endif

432:   if (x < s) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);
433:   if (y < s) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Local y-width of domain y %D is smaller than stencil width s %D",y,s);
434:   xe = xs + x;
435:   ye = ys + y;

437:   /* determine ghost region */
438:   /* Assume No Periodicity */
439:   if (xs-s > 0) Xs = xs - s; else Xs = 0;
440:   if (ys-s > 0) Ys = ys - s; else Ys = 0;
441:   if (xe+s <= M) Xe = xe + s; else Xe = M;
442:   if (ye+s <= N) Ye = ye + s; else Ye = N;

444:   /* X Periodic */
445:   if (DAXPeriodic(wrap)){
446:     Xs = xs - s;
447:     Xe = xe + s;
448:   }

450:   /* Y Periodic */
451:   if (DAYPeriodic(wrap)){
452:     Ys = ys - s;
453:     Ye = ye + s;
454:   }

456:   /* Resize all X parameters to reflect w */
457:   x   *= dof;
458:   xs  *= dof;
459:   xe  *= dof;
460:   Xs  *= dof;
461:   Xe  *= dof;
462:   s_x = s*dof;
463:   s_y = s;

465:   /* determine starting point of each processor */
466:   nn    = x*y;
467:   PetscMalloc((2*size+1)*sizeof(PetscInt),&bases);
468:   ldims = bases+size+1;
469:   MPI_Allgather(&nn,1,MPIU_INT,ldims,1,MPIU_INT,comm);
470:   bases[0] = 0;
471:   for (i=1; i<=size; i++) {
472:     bases[i] = ldims[i-1];
473:   }
474:   for (i=1; i<=size; i++) {
475:     bases[i] += bases[i-1];
476:   }

478:   /* allocate the base parallel and sequential vectors */
479:   da->Nlocal = x*y;
480:   VecCreateMPIWithArray(comm,da->Nlocal,PETSC_DECIDE,0,&global);
481:   VecSetBlockSize(global,dof);
482:   da->nlocal = (Xe-Xs)*(Ye-Ys);
483:   VecCreateSeqWithArray(PETSC_COMM_SELF,da->nlocal,0,&local);
484:   VecSetBlockSize(local,dof);


487:   /* generate appropriate vector scatters */
488:   /* local to global inserts non-ghost point region into global */
489:   VecGetOwnershipRange(global,&start,&end);
490:   ISCreateStride(comm,x*y,start,1,&to);

492:   left  = xs - Xs; down  = ys - Ys; up    = down + y;
493:   PetscMalloc(x*(up - down)*sizeof(PetscInt),&idx);
494:   count = 0;
495:   for (i=down; i<up; i++) {
496:     for (j=0; j<x/dof; j++) {
497:       idx[count++] = left + i*(Xe-Xs) + j*dof;
498:     }
499:   }
500:   ISCreateBlock(comm,dof,count,idx,&from);
501:   PetscFree(idx);

503:   VecScatterCreate(local,from,global,to,&ltog);
504:   PetscLogObjectParent(da,to);
505:   PetscLogObjectParent(da,from);
506:   PetscLogObjectParent(da,ltog);
507:   ISDestroy(from);
508:   ISDestroy(to);

510:   /* global to local must include ghost points */
511:   if (stencil_type == DA_STENCIL_BOX) {
512:     ISCreateStride(comm,(Xe-Xs)*(Ye-Ys),0,1,&to);
513:   } else {
514:     /* must drop into cross shape region */
515:     /*       ----------
516:             |  top    |
517:          |---         ---|
518:          |   middle      |
519:          |               |
520:          |---         ---|
521:             | bottom  |
522:              ----------
523:         Xs xs        xe  Xe */
524:     /* bottom */
525:     left  = xs - Xs; down = ys - Ys; up    = down + y;
526:     count = down*(xe-xs) + (up-down)*(Xe-Xs) + (Ye-Ys-up)*(xe-xs);
527:     PetscMalloc(count*sizeof(PetscInt)/dof,&idx);
528:     count = 0;
529:     for (i=0; i<down; i++) {
530:       for (j=0; j<xe-xs; j += dof) {
531:         idx[count++] = left + i*(Xe-Xs) + j;
532:       }
533:     }
534:     /* middle */
535:     for (i=down; i<up; i++) {
536:       for (j=0; j<Xe-Xs; j += dof) {
537:         idx[count++] = i*(Xe-Xs) + j;
538:       }
539:     }
540:     /* top */
541:     for (i=up; i<Ye-Ys; i++) {
542:       for (j=0; j<xe-xs; j += dof) {
543:         idx[count++] = left + i*(Xe-Xs) + j;
544:       }
545:     }
546:     ISCreateBlock(comm,dof,count,idx,&to);
547:     PetscFree(idx);
548:   }


551:   /* determine who lies on each side of us stored in    n6 n7 n8
552:                                                         n3    n5
553:                                                         n0 n1 n2
554:   */

556:   /* Assume the Non-Periodic Case */
557:   n1 = rank - m;
558:   if (rank % m) {
559:     n0 = n1 - 1;
560:   } else {
561:     n0 = -1;
562:   }
563:   if ((rank+1) % m) {
564:     n2 = n1 + 1;
565:     n5 = rank + 1;
566:     n8 = rank + m + 1; if (n8 >= m*n) n8 = -1;
567:   } else {
568:     n2 = -1; n5 = -1; n8 = -1;
569:   }
570:   if (rank % m) {
571:     n3 = rank - 1;
572:     n6 = n3 + m; if (n6 >= m*n) n6 = -1;
573:   } else {
574:     n3 = -1; n6 = -1;
575:   }
576:   n7 = rank + m; if (n7 >= m*n) n7 = -1;


579:   /* Modify for Periodic Cases */
580:   if (wrap == DA_YPERIODIC) {  /* Handle Top and Bottom Sides */
581:     if (n1 < 0) n1 = rank + m * (n-1);
582:     if (n7 < 0) n7 = rank - m * (n-1);
583:     if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
584:     if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1;
585:     if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
586:     if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1;
587:   } else if (wrap == DA_XPERIODIC) { /* Handle Left and Right Sides */
588:     if (n3 < 0) n3 = rank + (m-1);
589:     if (n5 < 0) n5 = rank - (m-1);
590:     if ((n1 >= 0) && (n0 < 0)) n0 = rank-1;
591:     if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1;
592:     if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1;
593:     if ((n7 >= 0) && (n8 < 0)) n8 = rank+1;
594:   } else if (wrap == DA_XYPERIODIC) {

596:     /* Handle all four corners */
597:     if ((n6 < 0) && (n7 < 0) && (n3 < 0)) n6 = m-1;
598:     if ((n8 < 0) && (n7 < 0) && (n5 < 0)) n8 = 0;
599:     if ((n2 < 0) && (n5 < 0) && (n1 < 0)) n2 = size-m;
600:     if ((n0 < 0) && (n3 < 0) && (n1 < 0)) n0 = size-1;

602:     /* Handle Top and Bottom Sides */
603:     if (n1 < 0) n1 = rank + m * (n-1);
604:     if (n7 < 0) n7 = rank - m * (n-1);
605:     if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
606:     if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1;
607:     if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
608:     if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1;

610:     /* Handle Left and Right Sides */
611:     if (n3 < 0) n3 = rank + (m-1);
612:     if (n5 < 0) n5 = rank - (m-1);
613:     if ((n1 >= 0) && (n0 < 0)) n0 = rank-1;
614:     if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1;
615:     if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1;
616:     if ((n7 >= 0) && (n8 < 0)) n8 = rank+1;
617:   }

619:   if (stencil_type == DA_STENCIL_STAR) {
620:     /* save corner processor numbers */
621:     sn0 = n0; sn2 = n2; sn6 = n6; sn8 = n8;
622:     n0 = n2 = n6 = n8 = -1;
623:   }

625:   PetscMalloc((x+2*s_x)*(y+2*s_y)*sizeof(PetscInt),&idx);
626:   PetscLogObjectMemory(da,(x+2*s_x)*(y+2*s_y)*sizeof(PetscInt));
627:   nn = 0;

629:   xbase = bases[rank];
630:   for (i=1; i<=s_y; i++) {
631:     if (n0 >= 0) { /* left below */
632:       x_t = lx[n0 % m]*dof;
633:       y_t = ly[(n0/m)];
634:       s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x;
635:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
636:     }
637:     if (n1 >= 0) { /* directly below */
638:       x_t = x;
639:       y_t = ly[(n1/m)];
640:       s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t;
641:       for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
642:     }
643:     if (n2 >= 0) { /* right below */
644:       x_t = lx[n2 % m]*dof;
645:       y_t = ly[(n2/m)];
646:       s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t;
647:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
648:     }
649:   }

651:   for (i=0; i<y; i++) {
652:     if (n3 >= 0) { /* directly left */
653:       x_t = lx[n3 % m]*dof;
654:       /* y_t = y; */
655:       s_t = bases[n3] + (i+1)*x_t - s_x;
656:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
657:     }

659:     for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */

661:     if (n5 >= 0) { /* directly right */
662:       x_t = lx[n5 % m]*dof;
663:       /* y_t = y; */
664:       s_t = bases[n5] + (i)*x_t;
665:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
666:     }
667:   }

669:   for (i=1; i<=s_y; i++) {
670:     if (n6 >= 0) { /* left above */
671:       x_t = lx[n6 % m]*dof;
672:       /* y_t = ly[(n6/m)]; */
673:       s_t = bases[n6] + (i)*x_t - s_x;
674:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
675:     }
676:     if (n7 >= 0) { /* directly above */
677:       x_t = x;
678:       /* y_t = ly[(n7/m)]; */
679:       s_t = bases[n7] + (i-1)*x_t;
680:       for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
681:     }
682:     if (n8 >= 0) { /* right above */
683:       x_t = lx[n8 % m]*dof;
684:       /* y_t = ly[(n8/m)]; */
685:       s_t = bases[n8] + (i-1)*x_t;
686:       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
687:     }
688:   }

690:   base = bases[rank];
691:   {
692:     PetscInt nnn = nn/dof,*iidx;
693:     PetscMalloc(nnn*sizeof(PetscInt),&iidx);
694:     for (i=0; i<nnn; i++) {
695:       iidx[i] = idx[dof*i];
696:     }
697:     ISCreateBlock(comm,dof,nnn,iidx,&from);
698:     PetscFree(iidx);
699:   }
700:   VecScatterCreate(global,from,local,to,&gtol);
701:   PetscLogObjectParent(da,to);
702:   PetscLogObjectParent(da,from);
703:   PetscLogObjectParent(da,gtol);
704:   ISDestroy(to);
705:   ISDestroy(from);

707:   if (stencil_type == DA_STENCIL_STAR) {
708:     /*
709:         Recompute the local to global mappings, this time keeping the 
710:       information about the cross corner processor numbers.
711:     */
712:     n0 = sn0; n2 = sn2; n6 = sn6; n8 = sn8;
713:     nn = 0;
714:     xbase = bases[rank];
715:     for (i=1; i<=s_y; i++) {
716:       if (n0 >= 0) { /* left below */
717:         x_t = lx[n0 % m]*dof;
718:         y_t = ly[(n0/m)];
719:         s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x;
720:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
721:       }
722:       if (n1 >= 0) { /* directly below */
723:         x_t = x;
724:         y_t = ly[(n1/m)];
725:         s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t;
726:         for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
727:       }
728:       if (n2 >= 0) { /* right below */
729:         x_t = lx[n2 % m]*dof;
730:         y_t = ly[(n2/m)];
731:         s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t;
732:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
733:       }
734:     }

736:     for (i=0; i<y; i++) {
737:       if (n3 >= 0) { /* directly left */
738:         x_t = lx[n3 % m]*dof;
739:         /* y_t = y; */
740:         s_t = bases[n3] + (i+1)*x_t - s_x;
741:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
742:       }

744:       for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */

746:       if (n5 >= 0) { /* directly right */
747:         x_t = lx[n5 % m]*dof;
748:         /* y_t = y; */
749:         s_t = bases[n5] + (i)*x_t;
750:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
751:       }
752:     }

754:     for (i=1; i<=s_y; i++) {
755:       if (n6 >= 0) { /* left above */
756:         x_t = lx[n6 % m]*dof;
757:         /* y_t = ly[(n6/m)]; */
758:         s_t = bases[n6] + (i)*x_t - s_x;
759:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
760:       }
761:       if (n7 >= 0) { /* directly above */
762:         x_t = x;
763:         /* y_t = ly[(n7/m)]; */
764:         s_t = bases[n7] + (i-1)*x_t;
765:         for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
766:       }
767:       if (n8 >= 0) { /* right above */
768:         x_t = lx[n8 % m]*dof;
769:         /* y_t = ly[(n8/m)]; */
770:         s_t = bases[n8] + (i-1)*x_t;
771:         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
772:       }
773:     }
774:   }
775:   PetscFree(bases);

777:   da->M  = M;  da->N  = N;  da->m  = m;  da->n  = n;  da->w = dof;  da->s = s;
778:   da->xs = xs; da->xe = xe; da->ys = ys; da->ye = ye; da->zs = 0; da->ze = 1;
779:   da->Xs = Xs; da->Xe = Xe; da->Ys = Ys; da->Ye = Ye; da->Zs = 0; da->Ze = 1;
780:   da->P  = 1;  da->p  = 1;

782:   VecDestroy(local);
783:   VecDestroy(global);

785:   da->gtol         = gtol;
786:   da->ltog         = ltog;
787:   da->idx          = idx;
788:   da->Nl           = nn;
789:   da->base         = base;
790:   da->wrap         = wrap;
791:   da->ops->view    = DAView_2d;
792:   da->stencil_type = stencil_type;

794:   /* 
795:      Set the local to global ordering in the global vector, this allows use
796:      of VecSetValuesLocal().
797:   */
798:   ISLocalToGlobalMappingCreateNC(comm,nn,idx,&da->ltogmap);
799:   ISLocalToGlobalMappingBlock(da->ltogmap,da->w,&da->ltogmapb);
800:   PetscLogObjectParent(da,da->ltogmap);

802:   *inra = da;

804:   da->ltol = PETSC_NULL;
805:   da->ao   = PETSC_NULL;


808:   if (!flx) {
809:     PetscMalloc(m*sizeof(PetscInt),&flx);
810:     PetscMemcpy(flx,lx,m*sizeof(PetscInt));
811:   }
812:   if (!fly) {
813:     PetscMalloc(n*sizeof(PetscInt),&fly);
814:     PetscMemcpy(fly,ly,n*sizeof(PetscInt));
815:   }
816:   da->lx = flx;
817:   da->ly = fly;
818:   DAView_Private(da);
819:   PetscPublishAll(da);
820:   return(0);
821: }
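
A typical creation sequence, shown only as a sketch (error handling abbreviated; the negative M,N allow the -da_grid_x/-da_grid_y overrides documented above):

  DA  da;
  Vec g,l;

  ierr = DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_STAR,
                    -8,-8,PETSC_DECIDE,PETSC_DECIDE,1,1,PETSC_NULL,PETSC_NULL,&da);CHKERRQ(ierr);
  ierr = DACreateGlobalVector(da,&g);CHKERRQ(ierr);
  ierr = DACreateLocalVector(da,&l);CHKERRQ(ierr);
  /* ... fill g, then DAGlobalToLocalBegin/End(da,g,INSERT_VALUES,l), compute ... */
  ierr = VecDestroy(g);CHKERRQ(ierr);
  ierr = VecDestroy(l);CHKERRQ(ierr);
  ierr = DADestroy(da);CHKERRQ(ierr);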

825: /*@
826:    DARefine - Creates a new distributed array that is a refinement of a given
827:    distributed array.

829:    Collective on DA

831:    Input Parameters:
832: +  da - initial distributed array
833: -  comm - communicator to contain the refined DA; must either be the same as the da communicator or contain
834:           it and be 2, 4, or 8 times larger. Currently ignored

836:    Output Parameter:
837: .  daref - refined distributed array

839:    Level: advanced

841:    Note:
842:    Currently, refinement consists of multiplying the number of grid spaces
843:    in each dimension of the DA by the refinement factors (2 by default; see DASetRefinementFactor()).

845: .keywords:  distributed array, refine

847: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetOwnershipRange()
848: @*/
849: PetscErrorCode  DARefine(DA da,MPI_Comm comm,DA *daref)
850: {
852:   PetscInt       M,N,P;
853:   DA             da2;


859:   if (DAXPeriodic(da->wrap) || da->interptype == DA_Q0){
860:     M = da->refine_x*da->M;
861:   } else {
862:     M = 1 + da->refine_x*(da->M - 1);
863:   }
864:   if (DAYPeriodic(da->wrap) || da->interptype == DA_Q0){
865:     N = da->refine_y*da->N;
866:   } else {
867:     N = 1 + da->refine_y*(da->N - 1);
868:   }
869:   if (DAZPeriodic(da->wrap) || da->interptype == DA_Q0){
870:     P = da->refine_z*da->P;
871:   } else {
872:     P = 1 + da->refine_z*(da->P - 1);
873:   }
874:   DACreate(((PetscObject)da)->comm,da->dim,da->wrap,da->stencil_type,M,N,P,da->m,da->n,da->p,da->w,da->s,0,0,0,&da2);

876:   /* allow overloaded (user replaced) operations to be inherited by refinement clones */
877:   da2->ops->getmatrix        = da->ops->getmatrix;
878:   da2->ops->getinterpolation = da->ops->getinterpolation;
879:   da2->ops->getcoloring      = da->ops->getcoloring;
880:   da2->interptype            = da->interptype;
881: 
882:   /* copy fill information if given */
883:   if (da->dfill) {
884:     PetscMalloc((da->dfill[da->w]+da->w+1)*sizeof(PetscInt),&da2->dfill);
885:     PetscMemcpy(da2->dfill,da->dfill,(da->dfill[da->w]+da->w+1)*sizeof(PetscInt));
886:   }
887:   if (da->ofill) {
888:     PetscMalloc((da->ofill[da->w]+da->w+1)*sizeof(PetscInt),&da2->ofill);
889:     PetscMemcpy(da2->ofill,da->ofill,(da->ofill[da->w]+da->w+1)*sizeof(PetscInt));
890:   }
891:   /* copy the refine information */
892:   da2->refine_x = da->refine_x;
893:   da2->refine_y = da->refine_y;
894:   da2->refine_z = da->refine_z;
895:   *daref = da2;
896:   return(0);
897: }
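
As a worked example of the sizing logic above: with the default refine_x = 2, a non-periodic direction with M = 9 refines to 1 + 2*(9-1) = 17 grid points, while a periodic (or DA_Q0) direction with M = 8 refines to 2*8 = 16.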

901: /*@
902:    DACoarsen - Creates a new distributed array that is a coarsening of a given
903:    distributed array.

905:    Collective on DA

907:    Input Parameters:
908: +  da - initial distributed array
909: -  comm - communicator to contain the coarsened DA. Currently ignored

911:    Output Parameter:
912: .  daref - coarsened distributed array

914:    Level: advanced

916:    Note:
917:    Currently, coarsening consists of dividing the number of grid spaces
918:    in each dimension of the DA by refine_x, refine_y, ....

920: .keywords:  distributed array, coarsen

922: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetOwnershipRange()
923: @*/
924: PetscErrorCode  DACoarsen(DA da, MPI_Comm comm,DA *daref)
925: {
927:   PetscInt       M,N,P;
928:   DA             da2;


934:   if (DAXPeriodic(da->wrap) || da->interptype == DA_Q0){
935:     if(da->refine_x)
936:       M = da->M / da->refine_x;
937:     else
938:       M = da->M;
939:   } else {
940:     if(da->refine_x)
941:       M = 1 + (da->M - 1) / da->refine_x;
942:     else
943:       M = da->M;
944:   }
945:   if (DAYPeriodic(da->wrap) || da->interptype == DA_Q0){
946:     if(da->refine_y)
947:       N = da->N / da->refine_y;
948:     else
949:       N = da->N;
950:   } else {
951:     if(da->refine_y)
952:       N = 1 + (da->N - 1) / da->refine_y;
953:     else
954:       N = da->N;
955:   }
956:   if (DAZPeriodic(da->wrap) || da->interptype == DA_Q0){
957:     if(da->refine_z)
958:       P = da->P / da->refine_z;
959:     else
960:       P = da->P;
961:   } else {
962:     if(da->refine_z)
963:       P = 1 + (da->P - 1) / da->refine_z;
964:     else
965:       P = da->P;
966:   }
967:   DACreate(((PetscObject)da)->comm,da->dim,da->wrap,da->stencil_type,M,N,P,da->m,da->n,da->p,da->w,da->s,0,0,0,&da2);

969:   /* allow overloaded (user replaced) operations to be inherited by the coarsened DA */
970:   da2->ops->getmatrix        = da->ops->getmatrix;
971:   da2->ops->getinterpolation = da->ops->getinterpolation;
972:   da2->ops->getcoloring      = da->ops->getcoloring;
973:   da2->interptype            = da->interptype;
974: 
975:   /* copy fill information if given */
976:   if (da->dfill) {
977:     PetscMalloc((da->dfill[da->w]+da->w+1)*sizeof(PetscInt),&da2->dfill);
978:     PetscMemcpy(da2->dfill,da->dfill,(da->dfill[da->w]+da->w+1)*sizeof(PetscInt));
979:   }
980:   if (da->ofill) {
981:     PetscMalloc((da->ofill[da->w]+da->w+1)*sizeof(PetscInt),&da2->ofill);
982:     PetscMemcpy(da2->ofill,da->ofill,(da->ofill[da->w]+da->w+1)*sizeof(PetscInt));
983:   }
984:   /* copy the refine information */
985:   da2->refine_x = da->refine_x;
986:   da2->refine_y = da->refine_y;
987:   da2->refine_z = da->refine_z;
988:   *daref = da2;
989:   return(0);
990: }

992: /*@
993:      DASetRefinementFactor - Sets the ratios by which the DA grid is refined

995:     Collective on DA

997:   Input Parameters:
998: +    da - the DA object
999: .    refine_x - ratio of fine grid to coarse in x direction (2 by default)
1000: .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
1001: -    refine_z - ratio of fine grid to coarse in z direction (2 by default)

1003:   Options Database Keys:
1004: +  -da_refine_x - refinement ratio in x direction
1005: .  -da_refine_y - refinement ratio in y direction
1006: -  -da_refine_z - refinement ratio in z direction

1008:   Level: intermediate

1010:     Notes: Pass PETSC_IGNORE to leave a value unchanged

1012: .seealso: DARefine(), DAGetRefinementFactor()
1013: @*/
1014: PetscErrorCode  DASetRefinementFactor(DA da, PetscInt refine_x, PetscInt refine_y,PetscInt refine_z)
1015: {
1017:   if (refine_x > 0) da->refine_x = refine_x;
1018:   if (refine_y > 0) da->refine_y = refine_y;
1019:   if (refine_z > 0) da->refine_z = refine_z;
1020:   return(0);
1021: }

1023: /*@C
1024:      DAGetRefinementFactor - Gets the ratios by which the DA grid is refined

1026:     Not Collective

1028:   Input Parameter:
1029: .    da - the DA object

1031:   Output Parameters:
1032: +    refine_x - ratio of fine grid to coarse in x direction (2 by default)
1033: .    refine_y - ratio of fine grid to coarse in y direction (2 by default)
1034: -    refine_z - ratio of fine grid to coarse in z direction (2 by default)

1036:   Level: intermediate

1038:     Notes: Pass PETSC_NULL for values you do not need

1040: .seealso: DARefine(), DASetRefinementFactor()
1041: @*/
1042: PetscErrorCode  DAGetRefinementFactor(DA da, PetscInt *refine_x, PetscInt *refine_y,PetscInt *refine_z)
1043: {
1045:   if (refine_x) *refine_x = da->refine_x;
1046:   if (refine_y) *refine_y = da->refine_y;
1047:   if (refine_z) *refine_z = da->refine_z;
1048:   return(0);
1049: }
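
A sketch combining the two calls (illustrative only):

  PetscInt rx,ry;
  DA       fine;

  ierr = DASetRefinementFactor(da,4,4,PETSC_IGNORE);CHKERRQ(ierr);   /* 4x in x and y, z unchanged */
  ierr = DAGetRefinementFactor(da,&rx,&ry,PETSC_NULL);CHKERRQ(ierr); /* now rx == ry == 4 */
  ierr = DARefine(da,((PetscObject)da)->comm,&fine);CHKERRQ(ierr);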

1051: /*@C
1052:      DASetGetMatrix - Sets the routine used by the DA to allocate a matrix.

1054:     Collective on DA

1056:   Input Parameters:
1057: +    da - the DA object
1058: -    f - the function that allocates the matrix for that specific DA

1060:   Level: developer

1062:    Notes: See DASetBlockFills(), which provides a simple way to supply the nonzero structure for
1063:        the diagonal and off-diagonal blocks of the matrix

1065: .seealso: DAGetMatrix(), DASetBlockFills()
1066: @*/
1067: PetscErrorCode  DASetGetMatrix(DA da,PetscErrorCode (*f)(DA, MatType,Mat*))
1068: {
1070:   da->ops->getmatrix = f;
1071:   return(0);
1072: }
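
A sketch of what such a routine might look like (MyGetMatrix is a hypothetical user routine; local sizes follow the DA's owned nodes times dof):

  PetscErrorCode MyGetMatrix(DA da,MatType mtype,Mat *J)
  {
    PetscErrorCode ierr;
    PetscInt       xm,ym,dof;

    PetscFunctionBegin;
    ierr = DAGetCorners(da,PETSC_NULL,PETSC_NULL,PETSC_NULL,&xm,&ym,PETSC_NULL);CHKERRQ(ierr);
    ierr = DAGetInfo(da,0,0,0,0,0,0,0,&dof,0,0,0);CHKERRQ(ierr);
    ierr = MatCreate(((PetscObject)da)->comm,J);CHKERRQ(ierr);
    ierr = MatSetSizes(*J,xm*ym*dof,xm*ym*dof,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
    /* preallocation appropriate to the stencil would go here */
    PetscFunctionReturn(0);
  }

  ierr = DASetGetMatrix(da,MyGetMatrix);CHKERRQ(ierr);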

1074: /*
1075:       M is number of grid points 
1076:       m is number of processors

1078: */
1081: PetscErrorCode  DASplitComm2d(MPI_Comm comm,PetscInt M,PetscInt N,PetscInt sw,MPI_Comm *outcomm)
1082: {
1084:   PetscInt       m,n = 0,x = 0,y = 0;
1085:   PetscMPIInt    size,csize,rank;

1088:   MPI_Comm_size(comm,&size);
1089:   MPI_Comm_rank(comm,&rank);

1091:   csize = 4*size;
1092:   do {
1093:     if (csize % 4) SETERRQ4(PETSC_ERR_ARG_INCOMP,"Cannot split communicator of size %d tried %d %D %D",size,csize,x,y);
1094:     csize   = csize/4;
1095: 
1096:     m = (PetscInt)(0.5 + sqrt(((double)M)*((double)csize)/((double)N)));
1097:     if (!m) m = 1;
1098:     while (m > 0) {
1099:       n = csize/m;
1100:       if (m*n == csize) break;
1101:       m--;
1102:     }
1103:     if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;}

1105:     x = M/m + ((M % m) > ((csize-1) % m));
1106:     y = (N + (csize-1)/m)/n;
1107:   } while ((x < 4 || y < 4) && csize > 1);
1108:   if (size != csize) {
1109:     MPI_Group    entire_group,sub_group;
1110:     PetscMPIInt  i,*groupies;

1112:     MPI_Comm_group(comm,&entire_group);
1113:     PetscMalloc(csize*sizeof(PetscMPIInt),&groupies);
1114:     for (i=0; i<csize; i++) {
1115:       groupies[i] = (rank/csize)*csize + i;
1116:     }
1117:     MPI_Group_incl(entire_group,csize,groupies,&sub_group);
1118:     PetscFree(groupies);
1119:     MPI_Comm_create(comm,sub_group,outcomm);
1120:     MPI_Group_free(&entire_group);
1121:     MPI_Group_free(&sub_group);
1122:     PetscInfo1(0,"DASplitComm2d:Creating redundant coarse problems of size %d\n",csize);
1123:   } else {
1124:     *outcomm = comm;
1125:   }
1126:   return(0);
1127: }

1131: /*@C
1132:        DASetLocalFunction - Caches in a DA a local function. 

1134:    Collective on DA

1136:    Input Parameters:
1137: +  da - initial distributed array
1138: -  lf - the local function

1140:    Level: intermediate

1142:    Notes: The routine SNESDAFormFunction() uses this cached function to evaluate the user-provided function.

1144: .keywords:  distributed array, refine

1146: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunctioni()
1147: @*/
1148: PetscErrorCode  DASetLocalFunction(DA da,DALocalFunction1 lf)
1149: {
1152:   da->lf    = lf;
1153:   return(0);
1154: }

1158: /*@C
1159:        DASetLocalFunctioni - Caches in a DA a local function that evaluates a single component

1161:    Collective on DA

1163:    Input Parameters:
1164: +  da - initial distributed array
1165: -  lfi - the local function

1167:    Level: intermediate

1169: .keywords:  distributed array, refine

1171: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction()
1172: @*/
1173: PetscErrorCode  DASetLocalFunctioni(DA da,PetscErrorCode (*lfi)(DALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
1174: {
1177:   da->lfi = lfi;
1178:   return(0);
1179: }

1183: /*@C
1184:        DASetLocalFunctionib - Caches in a DA a block local function that evaluates a single component

1186:    Collective on DA

1188:    Input Parameters:
1189: +  da - initial distributed array
1190: -  lfi - the local function

1192:    Level: intermediate

1194: .keywords:  distributed array, refine

1196: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction()
1197: @*/
1198: PetscErrorCode  DASetLocalFunctionib(DA da,PetscErrorCode (*lfi)(DALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
1199: {
1202:   da->lfib = lfi;
1203:   return(0);
1204: }

1208: PetscErrorCode DASetLocalAdicFunction_Private(DA da,DALocalFunction1 ad_lf)
1209: {
1212:   da->adic_lf = ad_lf;
1213:   return(0);
1214: }

1216: /*MC
1217:        DASetLocalAdicFunctioni - Caches in a DA a local functioni computed by ADIC/ADIFOR

1219:    Collective on DA

1221:    Synopsis:
1222:    PetscErrorCode DASetLocalAdicFunctioni(DA da,PetscErrorCode (*ad_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1223:    
1224:    Input Parameters:
1225: +  da - initial distributed array
1226: -  ad_lfi - the local function as computed by ADIC/ADIFOR

1228:    Level: intermediate

1230: .keywords:  distributed array, refine

1232: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction(),
1233:           DASetLocalJacobian(), DASetLocalFunctioni()
1234: M*/

1238: PetscErrorCode DASetLocalAdicFunctioni_Private(DA da,PetscErrorCode (*ad_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1239: {
1242:   da->adic_lfi = ad_lfi;
1243:   return(0);
1244: }

1246: /*MC
1247:        DASetLocalAdicMFFunctioni - Caches in a DA a local functioni computed by ADIC/ADIFOR

1249:    Collective on DA

1251:    Synopsis:
1252:    PetscErrorCode DASetLocalAdicMFFunctioni(DA da,PetscErrorCode (*admf_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1253:    
1254:    Input Parameters:
1255: +  da - initial distributed array
1256: -  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR

1258:    Level: intermediate

1260: .keywords:  distributed array, refine

1262: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction(),
1263:           DASetLocalJacobian(), DASetLocalFunctioni()
1264: M*/

1268: PetscErrorCode DASetLocalAdicMFFunctioni_Private(DA da,PetscErrorCode (*admf_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1269: {
1272:   da->adicmf_lfi = admf_lfi;
1273:   return(0);
1274: }

1276: /*MC
1277:        DASetLocalAdicFunctionib - Caches in a DA a block local functioni computed by ADIC/ADIFOR

1279:    Collective on DA

1281:    Synopsis:
1282:    PetscErrorCode DASetLocalAdicFunctionib(DA da,PetscErrorCode (*ad_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1283:    
1284:    Input Parameters:
1285: +  da - initial distributed array
1286: -  ad_lfi - the local function as computed by ADIC/ADIFOR

1288:    Level: intermediate

1290: .keywords:  distributed array, refine

1292: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction(),
1293:           DASetLocalJacobian(), DASetLocalFunctionib()
1294: M*/

1298: PetscErrorCode DASetLocalAdicFunctionib_Private(DA da,PetscErrorCode (*ad_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1299: {
1302:   da->adic_lfib = ad_lfi;
1303:   return(0);
1304: }

1306: /*MC
1307:        DASetLocalAdicMFFunctionib - Caches in a DA a block local functioni computed by ADIC/ADIFOR

1309:    Collective on DA

1311:    Synopsis:
1312:    PetscErrorCode DASetLocalAdicMFFunctionib(DA da,PetscErrorCode (*admf_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1313:    
1314:    Input Parameters:
1315: +  da - initial distributed array
1316: -  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR

1318:    Level: intermediate

1320: .keywords:  distributed array, refine

1322: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction(),
1323:           DASetLocalJacobian(), DASetLocalFunctionib()
1324: M*/

1328: PetscErrorCode DASetLocalAdicMFFunctionib_Private(DA da,PetscErrorCode (*admf_lfi)(DALocalInfo*,MatStencil*,void*,void*,void*))
1329: {
1332:   da->adicmf_lfib = admf_lfi;
1333:   return(0);
1334: }

1336: /*MC
1337:        DASetLocalAdicMFFunction - Caches in a DA a local function computed by ADIC/ADIFOR

1339:    Collective on DA

1341:    Synopsis:
1342:    PetscErrorCode DASetLocalAdicMFFunction(DA da,DALocalFunction1 ad_lf)
1343:    
1344:    Input Parameters:
1345: +  da - initial distributed array
1346: -  ad_lf - the local function as computed by ADIC/ADIFOR

1348:    Level: intermediate

1350: .keywords:  distributed array, refine

1352: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction(),
1353:           DASetLocalJacobian()
1354: M*/

1358: PetscErrorCode DASetLocalAdicMFFunction_Private(DA da,DALocalFunction1 ad_lf)
1359: {
1362:   da->adicmf_lf = ad_lf;
1363:   return(0);
1364: }

1366: /*@C
1367:        DASetLocalJacobian - Caches in a DA a local Jacobian

1369:    Collective on DA

1371:    
1372:    Input Parameters:
1373: +  da - initial distributed array
1374: -  lj - the local Jacobian

1376:    Level: intermediate

1378:    Notes: The routine SNESDAComputeJacobian() uses this cached routine to evaluate the user-provided Jacobian.

1380: .keywords:  distributed array, refine

1382: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalFunction()
1383: @*/
1386: PetscErrorCode  DASetLocalJacobian(DA da,DALocalFunction1 lj)
1387: {
1390:   da->lj    = lj;
1391:   return(0);
1392: }

1396: /*@C
1397:        DAGetLocalFunction - Gets the local function cached in a DA by DASetLocalFunction()

1399:    Collective on DA

1401:    Input Parameter:
1402: .  da - initial distributed array

1404:    Output Parameter:
1405: .  lf - the local function

1407:    Level: intermediate

1409: .keywords:  distributed array, refine

1411: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalJacobian(), DASetLocalFunction()
1412: @*/
1413: PetscErrorCode  DAGetLocalFunction(DA da,DALocalFunction1 *lf)
1414: {
1417:   if (lf)       *lf = da->lf;
1418:   return(0);
1419: }

1423: /*@C
1424:        DAGetLocalJacobian - Gets the local Jacobian cached in a DA by DASetLocalJacobian()

1426:    Collective on DA

1428:    Input Parameter:
1429: .  da - initial distributed array

1431:    Output Parameter:
1432: .  lj - the local Jacobian

1434:    Level: intermediate

1436: .keywords:  distributed array, refine

1438: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DAGetLocalFunction(), DASetLocalJacobian()
1439: @*/
1440: PetscErrorCode  DAGetLocalJacobian(DA da,DALocalFunction1 *lj)
1441: {
1444:   if (lj) *lj = da->lj;
1445:   return(0);
1446: }

1450: /*@
1451:     DAFormFunction - Evaluates a user-provided function on each processor that
1452:         shares a DA

1454:    Input Parameters:
1455: +    da - the DA that defines the grid
1456: .    vu - input vector
1457: .    vfu - output vector 
1458: -    w - any user data

1460:     Notes: Does NOT do ghost updates on vu upon entry

1462:            This should eventually replace DAFormFunction1

1464:     Level: advanced

1466: .seealso: DAComputeJacobian1WithAdic()

1468: @*/
1469: PetscErrorCode  DAFormFunction(DA da,PetscErrorCode (*lf)(void),Vec vu,Vec vfu,void *w)
1470: {
1472:   void           *u,*fu;
1473:   DALocalInfo    info;
1474:   PetscErrorCode (*f)(DALocalInfo*,void*,void*,void*) = (PetscErrorCode (*)(DALocalInfo*,void*,void*,void*))lf;
1475: 
1477:   DAGetLocalInfo(da,&info);
1478:   DAVecGetArray(da,vu,&u);
1479:   DAVecGetArray(da,vfu,&fu);

1481:   (*f)(&info,u,fu,w);
1482:   if (PetscExceptionValue(ierr)) {
1483:     PetscErrorCode pierr = DAVecRestoreArray(da,vu,&u);CHKERRQ(pierr);
1484:     pierr = DAVecRestoreArray(da,vfu,&fu);CHKERRQ(pierr);
1485:   }
1486: 

1488:   DAVecRestoreArray(da,vu,&u);
1489:   DAVecRestoreArray(da,vfu,&fu);
1490:   return(0);
1491: }

1495: /*@C 
1496:    DAFormFunctionLocal - This is a universal function evaluation routine for
1497:    a local DA function.

1499:    Collective on DA

1501:    Input Parameters:
1502: +  da - the DA context
1503: .  func - The local function
1504: .  X - input vector
1505: .  F - function vector
1506: -  ctx - A user context

1508:    Level: intermediate

1510: .seealso: DASetLocalFunction(), DASetLocalJacobian(), DASetLocalAdicFunction(), DASetLocalAdicMFFunction(),
1511:           SNESSetFunction(), SNESSetJacobian()

1513: @*/
1514: PetscErrorCode  DAFormFunctionLocal(DA da, DALocalFunction1 func, Vec X, Vec F, void *ctx)
1515: {
1516:   Vec            localX;
1517:   DALocalInfo    info;
1518:   void          *u;
1519:   void          *fu;

1523:   DAGetLocalVector(da,&localX);
1524:   /*
1525:      Scatter ghost points to local vector, using the 2-step process
1526:         DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
1527:   */
1528:   DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
1529:   DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
1530:   DAGetLocalInfo(da,&info);
1531:   DAVecGetArray(da,localX,&u);
1532:   DAVecGetArray(da,F,&fu);
1533:   (*func)(&info,u,fu,ctx);
1534:   if (PetscExceptionValue(ierr)) {
1535:     PetscErrorCode pierr = DAVecRestoreArray(da,localX,&u);CHKERRQ(pierr);
1536:     pierr = DAVecRestoreArray(da,F,&fu);CHKERRQ(pierr);
1537:   }
1538: 
1539:   DAVecRestoreArray(da,localX,&u);
1540:   DAVecRestoreArray(da,F,&fu);
1541:   if (PetscExceptionValue(ierr)) {
1542:     PetscErrorCode pierr = DARestoreLocalVector(da,&localX);CHKERRQ(pierr);
1543:   }
1544: 
1545:   DARestoreLocalVector(da,&localX);
1546:   return(0);
1547: }

1551: /*@C 
1552:    DAFormFunctionLocalGhost - This is a universal function evaluation routine for
1553:    a local DA function, but the ghost values of the output are communicated and added.

1555:    Collective on DA

1557:    Input Parameters:
1558: +  da - the DA context
1559: .  func - The local function
1560: .  X - input vector
1561: .  F - function vector
1562: -  ctx - A user context

1564:    Level: intermediate

1566: .seealso: DASetLocalFunction(), DASetLocalJacobian(), DASetLocalAdicFunction(), DASetLocalAdicMFFunction(),
1567:           SNESSetFunction(), SNESSetJacobian()

1569: @*/
1570: PetscErrorCode  DAFormFunctionLocalGhost(DA da, DALocalFunction1 func, Vec X, Vec F, void *ctx)
1571: {
1572:   Vec            localX, localF;
1573:   DALocalInfo    info;
1574:   void          *u;
1575:   void          *fu;

1579:   DAGetLocalVector(da,&localX);
1580:   DAGetLocalVector(da,&localF);
1581:   /*
1582:      Scatter ghost points to local vector, using the 2-step process
1583:         DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
1584:   */
1585:   DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
1586:   DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
1587:   VecSet(F, 0.0);
1588:   VecSet(localF, 0.0);
1589:   DAGetLocalInfo(da,&info);
1590:   DAVecGetArray(da,localX,&u);
1591:   DAVecGetArray(da,localF,&fu);
1592:   (*func)(&info,u,fu,ctx);
1593:   if (PetscExceptionValue(ierr)) {
1594:     PetscErrorCode pierr = DAVecRestoreArray(da,localX,&u);CHKERRQ(pierr);
1595:     pierr = DAVecRestoreArray(da,localF,&fu);CHKERRQ(pierr);
1596:   }
1597: 
1598:   DALocalToGlobalBegin(da,localF,F);
1599:   DALocalToGlobalEnd(da,localF,F);
1600:   DAVecRestoreArray(da,localX,&u);
1601:   DAVecRestoreArray(da,localF,&fu);
1602:   if (PetscExceptionValue(ierr)) {
1603:     PetscErrorCode pierr = DARestoreLocalVector(da,&localX);CHKERRQ(pierr);
1604:     pierr = DARestoreLocalVector(da,&localF);CHKERRQ(pierr);
1605:   }
1606: 
1607:   DARestoreLocalVector(da,&localX);
1608:   DARestoreLocalVector(da,&localF);
1609:   return(0);
1610: }

1614: /*@
1615:     DAFormFunction1 - Evaluates a user-provided function on each processor that
1616:         shares a DA

1618:    Input Parameters:
1619: +    da - the DA that defines the grid
1620: .    vu - input vector
1621: .    vfu - output vector 
1622: -    w - any user data

1624:     Notes: Does NOT do ghost updates on vu upon entry

1626:     Level: advanced

1628: .seealso: DAComputeJacobian1WithAdic()

1630: @*/
1631: PetscErrorCode  DAFormFunction1(DA da,Vec vu,Vec vfu,void *w)
1632: {
1634:   void           *u,*fu;
1635:   DALocalInfo    info;
1636: 

1639:   DAGetLocalInfo(da,&info);
1640:   DAVecGetArray(da,vu,&u);
1641:   DAVecGetArray(da,vfu,&fu);

1643:   CHKMEMQ;
1644:   (*da->lf)(&info,u,fu,w);
1645:   if (PetscExceptionValue(ierr)) {
1646:     PetscErrorCode pierr = DAVecRestoreArray(da,vu,&u);CHKERRQ(pierr);
1647:     pierr = DAVecRestoreArray(da,vfu,&fu);CHKERRQ(pierr);
1648:   }
1649: 
1650:   CHKMEMQ;

1652:   DAVecRestoreArray(da,vu,&u);
1653:   DAVecRestoreArray(da,vfu,&fu);
1654:   return(0);
1655: }

1659: PetscErrorCode  DAFormFunctioniTest1(DA da,void *w)
1660: {
1661:   Vec            vu,fu,fui;
1663:   PetscInt       i,n;
1664:   PetscScalar    *ui;
1665:   PetscRandom    rnd;
1666:   PetscReal      norm;

1669:   DAGetLocalVector(da,&vu);
1670:   PetscRandomCreate(PETSC_COMM_SELF,&rnd);
1671:   PetscRandomSetFromOptions(rnd);
1672:   VecSetRandom(vu,rnd);
1673:   PetscRandomDestroy(rnd);

1675:   DAGetGlobalVector(da,&fu);
1676:   DAGetGlobalVector(da,&fui);
1677: 
1678:   DAFormFunction1(da,vu,fu,w);

1680:   VecGetArray(fui,&ui);
1681:   VecGetLocalSize(fui,&n);
1682:   for (i=0; i<n; i++) {
1683:     DAFormFunctioni1(da,i,vu,ui+i,w);
1684:   }
1685:   VecRestoreArray(fui,&ui);

1687:   VecAXPY(fui,-1.0,fu);
1688:   VecNorm(fui,NORM_2,&norm);
1689:   PetscPrintf(((PetscObject)da)->comm,"Norm of difference in vectors %G\n",norm);
1690:   VecView(fu,0);
1691:   VecView(fui,0);

1693:   DARestoreLocalVector(da,&vu);
1694:   DARestoreGlobalVector(da,&fu);
1695:   DARestoreGlobalVector(da,&fui);
1696:   return(0);
1697: }

1701: /*@
1702:     DAFormFunctioni1 - Evaluates a user-provided point-wise function

1704:    Input Parameters:
1705: +    da - the DA that defines the grid
1706: .    i - the component of the function we wish to compute (must be local)
1707: .    vu - input vector
1708: .    vfu - output value
1709: -    w - any user data

1711:     Notes: Does NOT do ghost updates on vu upon entry

1713:     Level: advanced

1715: .seealso: DAComputeJacobian1WithAdic()

1717: @*/
1718: PetscErrorCode  DAFormFunctioni1(DA da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
1719: {
1721:   void           *u;
1722:   DALocalInfo    info;
1723:   MatStencil     stencil;
1724: 

1727:   DAGetLocalInfo(da,&info);
1728:   DAVecGetArray(da,vu,&u);

1730:   /* figure out stencil value from i */
1731:   stencil.c = i % info.dof;
1732:   stencil.i = (i % (info.xm*info.dof))/info.dof;
1733:   stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
1734:   stencil.k = i/(info.xm*info.ym*info.dof);

1736:   (*da->lfi)(&info,&stencil,u,vfu,w);

1738:   DAVecRestoreArray(da,vu,&u);
1739:   return(0);
1740: }
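
As a worked example of the decomposition above: with info.xm = 4, info.ym = 3 and info.dof = 2, the local index i = 19 gives stencil.c = 19 % 2 = 1, stencil.i = (19 % 8)/2 = 1, stencil.j = (19 % 24)/8 = 2 and stencil.k = 19/24 = 0, i.e. component 1 at local grid point (1,2,0).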

1744: /*@
1745:     DAFormFunctionib1 - Evaluates a user-provided point-block function

1747:    Input Parameters:
1748: +    da - the DA that defines the grid
1749: .    i - the component of the function we wish to compute (must be local)
1750: .    vu - input vector
1751: .    vfu - output value
1752: -    w - any user data

1754:     Notes: Does NOT do ghost updates on vu upon entry

1756:     Level: advanced

1758: .seealso: DAComputeJacobian1WithAdic()

1760: @*/
1761: PetscErrorCode  DAFormFunctionib1(DA da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
1762: {
1764:   void           *u;
1765:   DALocalInfo    info;
1766:   MatStencil     stencil;
1767: 
1769:   DAGetLocalInfo(da,&info);
1770:   DAVecGetArray(da,vu,&u);

1772:   /* figure out stencil value from i */
1773:   stencil.c = i % info.dof;
1774:   if (stencil.c) SETERRQ(PETSC_ERR_ARG_WRONG,"Point-block functions can only be called for the entire block");
1775:   stencil.i = (i % (info.xm*info.dof))/info.dof;
1776:   stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
1777:   stencil.k = i/(info.xm*info.ym*info.dof);

1779:   (*da->lfib)(&info,&stencil,u,vfu,w);

1781:   DAVecRestoreArray(da,vu,&u);
1782:   return(0);
1783: }

1785: #if defined(new)
1788: /*
1789:   DAGetDiagonal_MFFD - Gets the diagonal for a matrix free matrix where local
1790:     function lives on a DA

1792:         y ~= (F(u + ha) - F(u))/h, 
1793:   where F = nonlinear function, as set by SNESSetFunction()
1794:         u = current iterate
1795:         h = difference interval
1796: */
1797: PetscErrorCode DAGetDiagonal_MFFD(DA da,Vec U,Vec a)
1798: {
1799:   PetscScalar    h,*aa,*ww,v;
1800:   PetscReal      epsilon = PETSC_SQRT_MACHINE_EPSILON,umin = 100.0*PETSC_SQRT_MACHINE_EPSILON;
1802:   PetscInt       gI,nI;
1803:   MatStencil     stencil;
1804:   DALocalInfo    info;
1805: 
1807:   (*ctx->func)(0,U,a,ctx->funcctx);
1808:   (*ctx->funcisetbase)(U,ctx->funcctx);

1810:   VecGetArray(U,&ww);
1811:   VecGetArray(a,&aa);
1812: 
1813:   nI = 0;
1814:     h  = ww[gI];
1815:     if (h == 0.0) h = 1.0;
1816: #if !defined(PETSC_USE_COMPLEX)
1817:     if (h < umin && h >= 0.0)      h = umin;
1818:     else if (h < 0.0 && h > -umin) h = -umin;
1819: #else
1820:     if (PetscAbsScalar(h) < umin && PetscRealPart(h) >= 0.0)     h = umin;
1821:     else if (PetscRealPart(h) < 0.0 && PetscAbsScalar(h) < umin) h = -umin;
1822: #endif
1823:     h     *= epsilon;
1824: 
1825:     ww[gI] += h;
1826:     (*ctx->funci)(i,w,&v,ctx->funcctx);
1827:     aa[nI]  = (v - aa[nI])/h;
1828:     ww[gI] -= h;
1829:     nI++;
1830:   }
1831:   VecRestoreArray(U,&ww);
1832:   VecRestoreArray(a,&aa);
1833:   return(0);
1834: }
1835: #endif
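
/*
   For reference, the disabled routine above approximates each diagonal entry of
   the Jacobian by one-sided differencing of the pointwise function,

      J_ii ~= (F_i(u + h*e_i) - F_i(u)) / h,

   where e_i is the i-th unit vector and the step h is the i-th entry of u scaled
   by sqrt(machine epsilon), floored at umin so the division never uses a
   vanishing perturbation.
*/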

1837: #if defined(PETSC_HAVE_ADIC)
1839: #include "adic/ad_utils.h"

1844: /*@C
1845:     DAComputeJacobian1WithAdic - Evaluates an ADIC-provided Jacobian function on each processor that 
1846:         shares a DA

1848:    Input Parameters:
1849: +    da - the DA that defines the grid
1850: .    vu - input vector (ghosted)
1851: .    J - output matrix
1852: -    w - any user data

1854:    Level: advanced

1856:     Notes: Does NOT do ghost updates on vu upon entry

1858: .seealso: DAFormFunction1()

1860: @*/
1861: PetscErrorCode  DAComputeJacobian1WithAdic(DA da,Vec vu,Mat J,void *w)
1862: {
1864:   PetscInt       gtdof,tdof;
1865:   PetscScalar    *ustart;
1866:   DALocalInfo    info;
1867:   void           *ad_u,*ad_f,*ad_ustart,*ad_fstart;
1868:   ISColoring     iscoloring;

1871:   DAGetLocalInfo(da,&info);

1873:   PetscADResetIndep();

1875:   /* get space for derivative objects.  */
1876:   DAGetAdicArray(da,PETSC_TRUE,(void **)&ad_u,&ad_ustart,&gtdof);
1877:   DAGetAdicArray(da,PETSC_FALSE,(void **)&ad_f,&ad_fstart,&tdof);
1878:   VecGetArray(vu,&ustart);
1879:   DAGetColoring(da,IS_COLORING_GHOSTED,&iscoloring);

1881:   PetscADSetValueAndColor(ad_ustart,gtdof,iscoloring->colors,ustart);

1883:   VecRestoreArray(vu,&ustart);
1885:   PetscADIncrementTotalGradSize(iscoloring->n);
1884:   ISColoringDestroy(iscoloring);   /* destroy only after iscoloring->n has been used above */
1886:   PetscADSetIndepDone();

1889:   (*da->adic_lf)(&info,ad_u,ad_f,w);

1892:   /* stick the values into the matrix */
1893:   MatSetValuesAdic(J,(PetscScalar**)ad_fstart);

1895:   /* return space for derivative objects.  */
1896:   DARestoreAdicArray(da,PETSC_TRUE,(void **)&ad_u,&ad_ustart,&gtdof);
1897:   DARestoreAdicArray(da,PETSC_FALSE,(void **)&ad_f,&ad_fstart,&tdof);
1898:   return(0);
1899: }
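
/*
   Typical usage (a sketch; function and context names are the user's own, and
   the ADIC-differentiated local function must be registered first):

      DASetLocalAdicFunction(da,ad_MyLocalFunction);  // ad_MyLocalFunction: ADIC output, user name
      DAComputeJacobian1WithAdic(da,vu,J,userctx);    // vu must already be ghosted
*/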

1903: /*@C
1904:     DAMultiplyByJacobian1WithAdic - Applies an ADIC-provided Jacobian function to a vector on 
1905:     each processor that shares a DA.

1907:     Input Parameters:
1908: +   da - the DA that defines the grid
1909: .   vu - Jacobian is computed at this point (ghosted)
1910: .   v - product is done on this vector (ghosted)
1911: .   f - output vector = J(vu)*v (not ghosted)
1912: -   w - any user data

1914:     Notes: 
1915:     This routine does NOT do ghost updates on vu or v upon entry.

1917:    Level: advanced

1919: .seealso: DAFormFunction1()

1921: @*/
1922: PetscErrorCode  DAMultiplyByJacobian1WithAdic(DA da,Vec vu,Vec v,Vec f,void *w)
1923: {
1925:   PetscInt       i,gtdof,tdof;
1926:   PetscScalar    *avu,*av,*af,*ad_vustart,*ad_fstart;
1927:   DALocalInfo    info;
1928:   void           *ad_vu,*ad_f;

1931:   DAGetLocalInfo(da,&info);

1933:   /* get space for derivative objects.  */
1934:   DAGetAdicMFArray(da,PETSC_TRUE,(void **)&ad_vu,(void**)&ad_vustart,&gtdof);
1935:   DAGetAdicMFArray(da,PETSC_FALSE,(void **)&ad_f,(void**)&ad_fstart,&tdof);

1937:   /* copy input vector into derivative object */
1938:   VecGetArray(vu,&avu);
1939:   VecGetArray(v,&av);
1940:   for (i=0; i<gtdof; i++) {
1941:     ad_vustart[2*i]   = avu[i];
1942:     ad_vustart[2*i+1] = av[i];
1943:   }
1944:   VecRestoreArray(vu,&avu);
1945:   VecRestoreArray(v,&av);

1947:   PetscADResetIndep();
1948:   PetscADIncrementTotalGradSize(1);
1949:   PetscADSetIndepDone();

1951:   (*da->adicmf_lf)(&info,ad_vu,ad_f,w);

1953:   /* stick the values into the vector */
1954:   VecGetArray(f,&af);
1955:   for (i=0; i<tdof; i++) {
1956:     af[i] = ad_fstart[2*i+1];
1957:   }
1958:   VecRestoreArray(f,&af);

1960:   /* return space for derivative objects.  */
1961:   DARestoreAdicMFArray(da,PETSC_TRUE,(void **)&ad_vu,(void**)&ad_vustart,&gtdof);
1962:   DARestoreAdicMFArray(da,PETSC_FALSE,(void **)&ad_f,(void**)&ad_fstart,&tdof);
1963:   return(0);
1964: }
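
/*
   Layout note: with a single gradient direction the ADIC matrix-free arrays
   interleave values and directional derivatives, so for local index i

      ad_vustart[2*i]   holds the value u_i (copied from vu above)
      ad_vustart[2*i+1] holds the seed  v_i (the direction vector)

   and after the differentiated local function runs, ad_fstart[2*i+1] is the
   i-th entry of the product J(u)*v, which the loop above copies into f.
*/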
1965: #endif

1969: /*@
1970:     DAComputeJacobian1 - Evaluates a local Jacobian function on each processor that 
1971:         shares a DA

1973:    Input Parameters:
1974: +    da - the DA that defines the grid
1975: .    vu - input vector (ghosted)
1976: .    J - output matrix
1977: -    w - any user data

1979:     Notes: Does NOT do ghost updates on vu upon entry

1981:     Level: advanced

1983: .seealso: DAFormFunction1()

1985: @*/
1986: PetscErrorCode  DAComputeJacobian1(DA da,Vec vu,Mat J,void *w)
1987: {
1989:   void           *u;
1990:   DALocalInfo    info;

1993:   DAGetLocalInfo(da,&info);
1994:   DAVecGetArray(da,vu,&u);
1995:   (*da->lj)(&info,u,J,w);
1996:   DAVecRestoreArray(da,vu,&u);
1997:   return(0);
1998: }
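
/*
   Typical usage (a sketch; MyLocalJacobian and userctx are user names, and the
   ghost update is the caller's responsibility as noted above):

      DASetLocalJacobian(da,(DALocalFunction1)MyLocalJacobian);
      DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
      DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
      DAComputeJacobian1(da,localX,J,userctx);
*/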


2003: /*
2004:     DAComputeJacobian1WithAdifor - Evaluates an ADIFOR-provided local Jacobian function on each processor that 
2005:         shares a DA

2007:    Input Parameters:
2008: +    da - the DA that defines the grid
2009: .    vu - input vector (ghosted)
2010: .    J - output matrix
2011: -    w - any user data

2013:     Notes: Does NOT do ghost updates on vu upon entry

2015: .seealso: DAFormFunction1()

2017: */
2018: PetscErrorCode  DAComputeJacobian1WithAdifor(DA da,Vec vu,Mat J,void *w)
2019: {
2020:   PetscErrorCode  ierr;
2021:   PetscInt        i,Nc,N;
2022:   ISColoringValue *color;
2023:   DALocalInfo     info;
2024:   PetscScalar     *u,*g_u,*g_f,*f,*p_u;
2025:   ISColoring      iscoloring;
2026:   void            (*lf)(PetscInt*,DALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*) =
2027:                   (void (*)(PetscInt*,DALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*))*da->adifor_lf;

2030:   DAGetColoring(da,IS_COLORING_GHOSTED,&iscoloring);
2031:   Nc   = iscoloring->n;
2032:   DAGetLocalInfo(da,&info);
2033:   N    = info.gxm*info.gym*info.gzm*info.dof;

2035:   /* get space for derivative objects.  */
2036:   PetscMalloc(Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar),&g_u);
2037:   PetscMemzero(g_u,Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar));
2038:   p_u   = g_u;
2039:   color = iscoloring->colors;
2040:   for (i=0; i<N; i++) {
2041:     p_u[*color++] = 1.0;
2042:     p_u          += Nc;
2043:   }
2044:   ISColoringDestroy(iscoloring);
2045:   PetscMalloc(Nc*info.xm*info.ym*info.zm*info.dof*sizeof(PetscScalar),&g_f);
2046:   PetscMalloc(info.xm*info.ym*info.zm*info.dof*sizeof(PetscScalar),&f);

2048:   /* evaluate the ADIFOR-generated local function; it returns f and the colored derivatives in g_f */
2049: 
2050:   VecGetArray(vu,&u);
2051:   (lf)(&Nc,&info,u,g_u,&Nc,f,g_f,&Nc,w,&ierr);
2052:   VecRestoreArray(vu,&u);

2054:   /* stick the values into the matrix */
2055:   /* PetscScalarView(Nc*info.xm*info.ym,g_f,0); */
2056:   MatSetValuesAdifor(J,Nc,g_f);

2058:   /* return space for derivative objects.  */
2059:   PetscFree(g_u);
2060:   PetscFree(g_f);
2061:   PetscFree(f);
2062:   return(0);
2063: }
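
/*
   Seeding sketch: the loop above stores Nc derivative slots per ghosted dof in
   g_u and puts a 1.0 in the slot matching that dof's color.  For three dofs
   with colors {0,2,1} and Nc = 3 the seed rows are

      dof 0:  1 0 0
      dof 1:  0 0 1
      dof 2:  0 1 0

   so one differentiated evaluation produces, for every dof, the derivatives
   with respect to all dofs of each color simultaneously.
*/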

2067: /*@C 
2068:    DAFormJacobianLocal - This is a universal Jacobian evaluation routine for
2069:    a local DA function.

2071:    Collective on DA

2073:    Input Parameters:
2074: +  da - the DA context
2075: .  func - The local function
2076: .  X - input vector
2077: .  J - Jacobian matrix
2078: -  ctx - A user context

2080:    Level: intermediate

2082: .seealso: DASetLocalFunction(), DASetLocalJacobian(), DASetLocalAdicFunction(), DASetLocalAdicMFFunction(),
2083:           SNESSetFunction(), SNESSetJacobian()

2085: @*/
2086: PetscErrorCode  DAFormJacobianLocal(DA da, DALocalFunction1 func, Vec X, Mat J, void *ctx)
2087: {
2088:   Vec            localX;
2089:   DALocalInfo    info;
2090:   void          *u;

2094:   DAGetLocalVector(da,&localX);
2095:   /*
2096:      Scatter ghost points to local vector, using the 2-step process
2097:         DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
2098:   */
2099:   DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX);
2100:   DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX);
2101:   DAGetLocalInfo(da,&info);
2102:   DAVecGetArray(da,localX,&u);
2103:   (*func)(&info,u,J,ctx);
2104:   if (PetscExceptionValue(ierr)) {
2105:     PetscErrorCode pierr = DAVecRestoreArray(da,localX,&u);CHKERRQ(pierr);
2106:   }
2107: 
2108:   DAVecRestoreArray(da,localX,&u);
2109:   if (PetscExceptionValue(ierr)) {
2110:     PetscErrorCode pierr = DARestoreLocalVector(da,&localX);CHKERRQ(pierr);
2111:   }
2112: 
2113:   DARestoreLocalVector(da,&localX);
2114:   return(0);
2115: }
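
/*
   Typical usage (a sketch, using the SNES Jacobian callback signature of this
   PETSc generation; MySNESJacobian, MyLocalJacobian and MyAppCtx are user names):

      PetscErrorCode MySNESJacobian(SNES snes,Vec X,Mat *J,Mat *B,MatStructure *flag,void *appctx)
      {
        DA da = ((MyAppCtx*)appctx)->da;   // hypothetical user context holding the DA
        DAFormJacobianLocal(da,(DALocalFunction1)MyLocalJacobian,X,*J,appctx);
        *flag = SAME_NONZERO_PATTERN;
        return(0);
      }
*/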

2119: /*@C
2120:     DAMultiplyByJacobian1WithAD - Applies a Jacobian function supplied by ADIFOR or ADIC
2121:     to a vector on each processor that shares a DA.

2123:    Input Parameters:
2124: +    da - the DA that defines the grid
2125: .    u - Jacobian is computed at this point (ghosted)
2126: .    v - product is done on this vector (ghosted)
2127: .    f - output vector = J(u)*v (not ghosted)
2128: -    w - any user data

2130:     Notes: 
2131:     This routine does NOT do ghost updates on u and v upon entry.
2132:            
2133:     Automatically calls DAMultiplyByJacobian1WithAdic() or DAMultiplyByJacobian1WithAdifor(),
2134:     depending on whether DASetLocalAdicMFFunction() or DASetLocalAdiforMFFunction() was called.

2136:    Level: advanced

2138: .seealso: DAFormFunction1(), DAMultiplyByJacobian1WithAdifor(), DAMultiplyByJacobian1WithAdic()

2140: @*/
2141: PetscErrorCode  DAMultiplyByJacobian1WithAD(DA da,Vec u,Vec v,Vec f,void *w)
2142: {

2146:   if (da->adicmf_lf) {
2147: #if defined(PETSC_HAVE_ADIC)
2148:     DAMultiplyByJacobian1WithAdic(da,u,v,f,w);
2149: #else
2150:     SETERRQ(PETSC_ERR_SUP_SYS,"Requires ADIC to be installed and cannot use complex numbers");
2151: #endif
2152:   } else if (da->adiformf_lf) {
2153:     DAMultiplyByJacobian1WithAdifor(da,u,v,f,w);
2154:   } else {
2155:     SETERRQ(PETSC_ERR_ORDER,"Must call DASetLocalAdiforMFFunction() or DASetLocalAdicMFFunction() before using");
2156:   }
2157:   return(0);
2158: }


2163: /*@C
2164:     DAMultiplyByJacobian1WithAdifor - Applies an ADIFOR-provided Jacobian function to a vector on each
2165:         processor that shares a DA

2167:    Input Parameters:
2168: +    da - the DA that defines the grid
2169: .    u - Jacobian is computed at this point (ghosted)
2170: .    v - product is done on this vector (ghosted)
2171: .    f - output vector = J(u)*v (not ghosted)
2172: -    w - any user data

2174:     Notes: Does NOT do ghost updates on u and v upon entry

2176:    Level: advanced

2178: .seealso: DAFormFunction1()

2180: @*/
2181: PetscErrorCode  DAMultiplyByJacobian1WithAdifor(DA da,Vec u,Vec v,Vec f,void *w)
2182: {
2184:   PetscScalar    *au,*av,*af,*awork;
2185:   Vec            work;
2186:   DALocalInfo    info;
2187:   void           (*lf)(DALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*) =
2188:                  (void (*)(DALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*))*da->adiformf_lf;

2191:   DAGetLocalInfo(da,&info);

2193:   DAGetGlobalVector(da,&work);
2194:   VecGetArray(u,&au);
2195:   VecGetArray(v,&av);
2196:   VecGetArray(f,&af);
2197:   VecGetArray(work,&awork);
2198:   (lf)(&info,au,av,awork,af,w,&ierr);
2199:   VecRestoreArray(u,&au);
2200:   VecRestoreArray(v,&av);
2201:   VecRestoreArray(f,&af);
2202:   VecRestoreArray(work,&awork);
2203:   DARestoreGlobalVector(da,&work);

2205:   return(0);
2206: }

2210: /*@
2211:        DASetInterpolationType - Sets the type of interpolation that will be 
2212:           returned by DAGetInterpolation()

2214:    Collective on DA

2216:    Input Parameters:
2217: +  da - initial distributed array
2218: -  ctype - DA_Q1 and DA_Q0 are currently the only supported forms

2220:    Level: intermediate

2222:    Notes: you should call this on the coarser of the two DAs you pass to DAGetInterpolation()

2224: .keywords:  distributed array, interpolation

2226: .seealso: DACreate1d(), DACreate2d(), DACreate3d(), DADestroy(), DA, DAInterpolationType
2227: @*/
2228: PetscErrorCode  DASetInterpolationType(DA da,DAInterpolationType ctype)
2229: {
2232:   da->interptype = ctype;
2233:   return(0);
2234: }