Actual source code: dm.c

petsc-dev 2014-02-02
  1: #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
  2: #include <petscsf.h>

  4: PetscClassId  DM_CLASSID;
  5: PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;

  9: /*@
 10:   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

 12:    If you never call DMSetType() it will generate an
 13:    error when you try to use the DM.

 15:   Collective on MPI_Comm

 17:   Input Parameter:
 18: . comm - The communicator for the DM object

 20:   Output Parameter:
 21: . dm - The DM object

 23:   Level: beginner

 25: .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
 26: @*/
 27: PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
 28: {
 29:   DM             v;

 34:   *dm = NULL;
 35:   PetscSysInitializePackage();
 36:   VecInitializePackage();
 37:   MatInitializePackage();
 38:   DMInitializePackage();

 40:   PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);
 41:   PetscMemzero(v->ops, sizeof(struct _DMOps));


 44:   v->ltogmap              = NULL;
 45:   v->ltogmapb             = NULL;
 46:   v->bs                   = 1;
 47:   v->coloringtype         = IS_COLORING_GLOBAL;
 48:   PetscSFCreate(comm, &v->sf);
 49:   PetscSFCreate(comm, &v->defaultSF);
 50:   v->defaultSection       = NULL;
 51:   v->defaultGlobalSection = NULL;
 52:   {
 53:     PetscInt i;
 54:     for (i = 0; i < 10; ++i) {
 55:       v->nullspaceConstructors[i] = NULL;
 56:     }
 57:   }
 58:   v->numFields = 0;
 59:   v->fields    = NULL;
 60:   DMSetVecType(v,VECSTANDARD);
 61:   DMSetMatType(v,MATAIJ);
 62:   *dm = v;
 63:   return(0);
 64: }
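
/*
   A minimal usage sketch (illustrative only, not part of this file; error checking omitted):
   a DM is created empty, given a type (DMDA is just one example), configured, and set up
   before it is used. Type-specific parameters (e.g. sizes for a DMDA) would be set before
   DMSetUp().

     DM dm;
     DMCreate(PETSC_COMM_WORLD,&dm);
     DMSetType(dm,DMDA);
     DMSetFromOptions(dm);
     DMSetUp(dm);
     DMDestroy(&dm);
*/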

 68: /*@
 69:   DMClone - Creates a DM object with the same topology as the original.

 71:   Collective on MPI_Comm

 73:   Input Parameter:
 74: . dm - The original DM object

 76:   Output Parameter:
 77: . newdm  - The new DM object

 79:   Level: beginner

 81: .keywords: DM, topology, create
 82: @*/
 83: PetscErrorCode DMClone(DM dm, DM *newdm)
 84: {
 85:   PetscSF        sf;
 86:   Vec            coords;
 87:   void          *ctx;

 93:   DMCreate(PetscObjectComm((PetscObject)dm), newdm);
 94:   if (dm->ops->clone) {
 95:     (*dm->ops->clone)(dm, newdm);
 96:   }
 97:   (*newdm)->setupcalled = PETSC_TRUE;
 98:   DMGetPointSF(dm, &sf);
 99:   DMSetPointSF(*newdm, sf);
100:   DMGetApplicationContext(dm, &ctx);
101:   DMSetApplicationContext(*newdm, ctx);
102:   DMGetCoordinatesLocal(dm, &coords);
103:   if (coords) {
104:     DMSetCoordinatesLocal(*newdm, coords);
105:   } else {
106:     DMGetCoordinates(dm, &coords);
107:     if (coords) {DMSetCoordinates(*newdm, coords);}
108:   }
109:   return(0);
110: }
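
/*
   A minimal sketch (illustrative, assuming an already set-up DM named dm): the clone shares
   the topology, point SF, coordinates and application context of the original, but is a
   separate object that can carry, for example, a different default PetscSection.

     DM newdm;
     DMClone(dm,&newdm);
     ... attach a different PetscSection, create vectors, etc. ...
     DMDestroy(&newdm);
*/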

114: /*@C
115:        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

117:    Logically Collective on DMDA

119:    Input Parameters:
120: +  da - initial distributed array
121: -  ctype - the vector type, currently either VECSTANDARD or VECCUSP

123:    Options Database:
124: .   -dm_vec_type ctype

126:    Level: intermediate

128: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType()
129: @*/
130: PetscErrorCode  DMSetVecType(DM da,VecType ctype)
131: {

136:   PetscFree(da->vectype);
137:   PetscStrallocpy(ctype,(char**)&da->vectype);
138:   return(0);
139: }
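
/*
   A minimal sketch (illustrative, assuming a DM named dm): the vector implementation can be
   chosen either in code or, as documented above, from the options database.

     DMSetVecType(dm,VECSTANDARD);
     DMSetFromOptions(dm);            or let -dm_vec_type <type> choose it
*/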

143: /*@C
144:        DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

146:    Logically Collective on DMDA

148:    Input Parameter:
149: .  da - initial distributed array

151:    Output Parameter:
152: .  ctype - the vector type

154:    Level: intermediate

156: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
157: @*/
158: PetscErrorCode  DMGetVecType(DM da,VecType *ctype)
159: {
162:   *ctype = da->vectype;
163:   return(0);
164: }

168: /*@
169:   VecGetDM - Gets the DM defining the data layout of the vector

171:   Not collective

173:   Input Parameter:
174: . v - The Vec

176:   Output Parameter:
177: . dm - The DM

179:   Level: intermediate

181: .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
182: @*/
183: PetscErrorCode VecGetDM(Vec v, DM *dm)
184: {

190:   PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);
191:   return(0);
192: }

196: /*@
197:   VecSetDM - Sets the DM defining the data layout of the vector

199:   Not collective

201:   Input Parameters:
202: + v - The Vec
203: - dm - The DM

205:   Level: intermediate

207: .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
208: @*/
209: PetscErrorCode VecSetDM(Vec v, DM dm)
210: {

216:   PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);
217:   return(0);
218: }
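
/*
   VecSetDM() and VecGetDM() simply compose/query the DM on the vector under the key
   "__PETSc_dm", so the association is a lightweight PetscObject attachment. A minimal
   sketch (illustrative, assuming a DM named dm):

     Vec v;
     DM  vdm;
     VecCreate(PETSC_COMM_WORLD,&v);
     VecSetDM(v,dm);                  record which DM describes the layout of v
     VecGetDM(v,&vdm);                retrieves the same DM
     VecDestroy(&v);
*/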

222: /*@C
223:        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

225:    Logically Collective on DM

227:    Input Parameters:
228: +  dm - the DM context
229: -  ctype - the matrix type

231:    Options Database:
232: .   -dm_mat_type ctype

234:    Level: intermediate

236: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType()
237: @*/
238: PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
239: {

244:   PetscFree(dm->mattype);
245:   PetscStrallocpy(ctype,(char**)&dm->mattype);
246:   return(0);
247: }

251: /*@C
252:        DMGetMatType - Gets the type of matrix created with DMCreateMatrix()

254:    Logically Collective on DM

256:    Input Parameter:
257: .  dm - the DM context

259:    Output Parameter:
260: .  ctype - the matrix type

262:    Options Database:
263: .   -dm_mat_type ctype

265:    Level: intermediate

267: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType()
268: @*/
269: PetscErrorCode  DMGetMatType(DM dm,MatType *ctype)
270: {
273:   *ctype = dm->mattype;
274:   return(0);
275: }

279: /*@
280:   MatGetDM - Gets the DM defining the data layout of the matrix

282:   Not collective

284:   Input Parameter:
285: . A - The Mat

287:   Output Parameter:
288: . dm - The DM

290:   Level: intermediate

292: .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
293: @*/
294: PetscErrorCode MatGetDM(Mat A, DM *dm)
295: {

301:   PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);
302:   return(0);
303: }

307: /*@
308:   MatSetDM - Sets the DM defining the data layout of the matrix

310:   Not collective

312:   Input Parameters:
313: + A - The Mat
314: - dm - The DM

316:   Level: intermediate

318: .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
319: @*/
320: PetscErrorCode MatSetDM(Mat A, DM dm)
321: {

327:   PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);
328:   return(0);
329: }

333: /*@C
334:    DMSetOptionsPrefix - Sets the prefix used for searching for all
335:    DM options in the database.

337:    Logically Collective on DM

339:    Input Parameters:
340: +  dm - the DM context
341: -  prefix - the prefix to prepend to all option names

343:    Notes:
344:    A hyphen (-) must NOT be given at the beginning of the prefix name.
345:    The first character of all runtime options is AUTOMATICALLY the hyphen.

347:    Level: advanced

349: .keywords: DMDA, set, options, prefix, database

351: .seealso: DMSetFromOptions()
352: @*/
353: PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
354: {

359:   PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);
360:   return(0);
361: }
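
/*
   A minimal sketch (illustrative, assuming a DM named dm and a hypothetical prefix "sub_"):
   with a prefix set, the DM options are read with that prefix prepended, e.g.
   -sub_dm_vec_type instead of -dm_vec_type.

     DMSetOptionsPrefix(dm,"sub_");
     DMSetFromOptions(dm);
*/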

365: /*@
366:     DMDestroy - Destroys a vector packer or DMDA.

368:     Collective on DM

370:     Input Parameter:
371: .   dm - the DM object to destroy

373:     Level: developer

375: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

377: @*/
378: PetscErrorCode  DMDestroy(DM *dm)
379: {
380:   PetscInt       i, cnt = 0, f;
381:   DMNamedVecLink nlink,nnext;

385:   if (!*dm) return(0);

388:   /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */
389:   for (f = 0; f < (*dm)->numFields; ++f) {
390:     PetscObjectCompose((*dm)->fields[f], "pmat", NULL);
391:     PetscObjectCompose((*dm)->fields[f], "nullspace", NULL);
392:     PetscObjectCompose((*dm)->fields[f], "nearnullspace", NULL);
393:   }
394:   /* count all the circular references of DM and its contained Vecs */
395:   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
396:     if ((*dm)->localin[i])  cnt++;
397:     if ((*dm)->globalin[i]) cnt++;
398:   }
399:   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
400:   for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
401:   if ((*dm)->x) {
402:     DM obj;
403:     VecGetDM((*dm)->x, &obj);
404:     if (obj == *dm) cnt++;
405:   }

407:   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; return(0);}
408:   /*
409:      Need this test because the dm references the vectors that
410:      reference the dm, so destroying the dm calls destroy on the
411:      vectors that cause another destroy on the dm
412:   */
413:   if (((PetscObject)(*dm))->refct < 0) return(0);
414:   ((PetscObject) (*dm))->refct = 0;
415:   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
416:     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
417:     VecDestroy(&(*dm)->localin[i]);
418:   }
419:   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
420:     nnext = nlink->next;
421:     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
422:     PetscFree(nlink->name);
423:     VecDestroy(&nlink->X);
424:     PetscFree(nlink);
425:   }
426:   (*dm)->namedglobal = NULL;

428:   for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
429:     nnext = nlink->next;
430:     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
431:     PetscFree(nlink->name);
432:     VecDestroy(&nlink->X);
433:     PetscFree(nlink);
434:   }
435:   (*dm)->namedlocal = NULL;

437:   /* Destroy the list of hooks */
438:   {
439:     DMCoarsenHookLink link,next;
440:     for (link=(*dm)->coarsenhook; link; link=next) {
441:       next = link->next;
442:       PetscFree(link);
443:     }
444:     (*dm)->coarsenhook = NULL;
445:   }
446:   {
447:     DMRefineHookLink link,next;
448:     for (link=(*dm)->refinehook; link; link=next) {
449:       next = link->next;
450:       PetscFree(link);
451:     }
452:     (*dm)->refinehook = NULL;
453:   }
454:   {
455:     DMSubDomainHookLink link,next;
456:     for (link=(*dm)->subdomainhook; link; link=next) {
457:       next = link->next;
458:       PetscFree(link);
459:     }
460:     (*dm)->subdomainhook = NULL;
461:   }
462:   {
463:     DMGlobalToLocalHookLink link,next;
464:     for (link=(*dm)->gtolhook; link; link=next) {
465:       next = link->next;
466:       PetscFree(link);
467:     }
468:     (*dm)->gtolhook = NULL;
469:   }
470:   /* Destroy the work arrays */
471:   {
472:     DMWorkLink link,next;
473:     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
474:     for (link=(*dm)->workin; link; link=next) {
475:       next = link->next;
476:       PetscFree(link->mem);
477:       PetscFree(link);
478:     }
479:     (*dm)->workin = NULL;
480:   }

482:   PetscObjectDestroy(&(*dm)->dmksp);
483:   PetscObjectDestroy(&(*dm)->dmsnes);
484:   PetscObjectDestroy(&(*dm)->dmts);

486:   if ((*dm)->ctx && (*dm)->ctxdestroy) {
487:     (*(*dm)->ctxdestroy)(&(*dm)->ctx);
488:   }
489:   VecDestroy(&(*dm)->x);
490:   MatFDColoringDestroy(&(*dm)->fd);
491:   DMClearGlobalVectors(*dm);
492:   ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);
493:   ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);
494:   PetscFree((*dm)->vectype);
495:   PetscFree((*dm)->mattype);

497:   PetscSectionDestroy(&(*dm)->defaultSection);
498:   PetscSectionDestroy(&(*dm)->defaultGlobalSection);
499:   PetscLayoutDestroy(&(*dm)->map);
500:   PetscSFDestroy(&(*dm)->sf);
501:   PetscSFDestroy(&(*dm)->defaultSF);

503:   DMDestroy(&(*dm)->coordinateDM);
504:   VecDestroy(&(*dm)->coordinates);
505:   VecDestroy(&(*dm)->coordinatesLocal);

507:   for (f = 0; f < (*dm)->numFields; ++f) {
508:     PetscObjectDestroy(&(*dm)->fields[f]);
509:   }
510:   PetscFree((*dm)->fields);
511:   /* if memory was published with SAWs then destroy it */
512:   PetscObjectSAWsViewOff((PetscObject)*dm);

514:   (*(*dm)->ops->destroy)(*dm);
515:   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
516:   PetscHeaderDestroy(dm);
517:   return(0);
518: }

522: /*@
523:     DMSetUp - sets up the data structures inside a DM object

525:     Collective on DM

527:     Input Parameter:
528: .   dm - the DM object to setup

530:     Level: developer

532: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

534: @*/
535: PetscErrorCode  DMSetUp(DM dm)
536: {

541:   if (dm->setupcalled) return(0);
542:   if (dm->ops->setup) {
543:     (*dm->ops->setup)(dm);
544:   }
545:   dm->setupcalled = PETSC_TRUE;
546:   return(0);
547: }

551: /*@
552:     DMSetFromOptions - sets parameters in a DM from the options database

554:     Collective on DM

556:     Input Parameter:
557: .   dm - the DM object to set options for

559:     Options Database:
560: +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
561: .   -dm_vec_type <type>  type of vector to create inside DM
562: .   -dm_mat_type <type>  type of matrix to create inside DM
563: -   -dm_is_coloring_type <global or ghosted>

565:     Level: developer

567: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

569: @*/
570: PetscErrorCode  DMSetFromOptions(DM dm)
571: {
572:   char           typeName[256];
573:   PetscBool      flg;

578:   PetscObjectOptionsBegin((PetscObject)dm);
579:   PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);
580:   PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);
581:   if (flg) {
582:     DMSetVecType(dm,typeName);
583:   }
584:   PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);
585:   if (flg) {
586:     DMSetMatType(dm,typeName);
587:   }
588:   PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);
589:   if (dm->ops->setfromoptions) {
590:     (*dm->ops->setfromoptions)(dm);
591:   }
592:   /* process any options handlers added with PetscObjectAddOptionsHandler() */
593:   PetscObjectProcessOptionsHandlers((PetscObject) dm);
594:   PetscOptionsEnd();
595:   return(0);
596: }
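
/*
   A minimal sketch (illustrative, assuming a DM named dm): DMSetFromOptions() is typically
   called after the type is chosen and before DMSetUp(), so that options such as
   -dm_vec_type, -dm_mat_type, -dm_preallocate_only and -dm_is_coloring_type take effect on
   the objects the DM will later create.

     DMSetFromOptions(dm);
     DMSetUp(dm);
*/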

600: /*@C
601:     DMView - Views a vector packer or DMDA.

603:     Collective on DM

605:    Input Parameters:
606: +   dm - the DM object to view
607: -   v - the viewer

609:     Level: developer

611: .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

613: @*/
614: PetscErrorCode  DMView(DM dm,PetscViewer v)
615: {
617:   PetscBool      isbinary;

621:   if (!v) {
622:     PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);
623:   }
624:   PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);
625:   if (isbinary) {
626:     PetscInt classid = DM_FILE_CLASSID;
627:     char     type[256];

629:     PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);
630:     PetscStrncpy(type,((PetscObject)dm)->type_name,256);
631:     PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);
632:   }
633:   if (dm->ops->view) {
634:     (*dm->ops->view)(dm,v);
635:   }
636:   return(0);
637: }

641: /*@
642:     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object

644:     Collective on DM

646:     Input Parameter:
647: .   dm - the DM object

649:     Output Parameter:
650: .   vec - the global vector

652:     Level: beginner

654: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

656: @*/
657: PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
658: {

663:   (*dm->ops->createglobalvector)(dm,vec);
664:   return(0);
665: }

669: /*@
670:     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object

672:     Not Collective

674:     Input Parameter:
675: .   dm - the DM object

677:     Output Parameter:
678: .   vec - the local vector

680:     Level: beginner

682: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

684: @*/
685: PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
686: {

691:   (*dm->ops->createlocalvector)(dm,vec);
692:   return(0);
693: }
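
/*
   A minimal sketch (illustrative, assuming a set-up DM named dm; error checking omitted):
   create a matching pair of global and local vectors and destroy them when finished. For
   short-lived work vectors, DMGetGlobalVector()/DMRestoreGlobalVector() and
   DMGetLocalVector()/DMRestoreLocalVector() reuse internal storage instead.

     Vec g,l;
     DMCreateGlobalVector(dm,&g);
     DMCreateLocalVector(dm,&l);
     ...
     VecDestroy(&g);
     VecDestroy(&l);
*/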

697: /*@
698:    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

700:    Collective on DM

702:    Input Parameter:
703: .  dm - the DM that provides the mapping

705:    Output Parameter:
706: .  ltog - the mapping

708:    Level: intermediate

710:    Notes:
711:    This mapping can then be used by VecSetLocalToGlobalMapping() or
712:    MatSetLocalToGlobalMapping().

714: .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
715: @*/
716: PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
717: {

723:   if (!dm->ltogmap) {
724:     PetscSection section, sectionGlobal;

726:     DMGetDefaultSection(dm, &section);
727:     if (section) {
728:       PetscInt *ltog;
729:       PetscInt pStart, pEnd, size, p, l;

731:       DMGetDefaultGlobalSection(dm, &sectionGlobal);
732:       PetscSectionGetChart(section, &pStart, &pEnd);
733:       PetscSectionGetStorageSize(section, &size);
734:       PetscMalloc1(size, &ltog); /* We want the local+overlap size */
735:       for (p = pStart, l = 0; p < pEnd; ++p) {
736:         PetscInt dof, off, c;

738:         /* Should probably use constrained dofs */
739:         PetscSectionGetDof(section, p, &dof);
740:         PetscSectionGetOffset(sectionGlobal, p, &off);
741:         for (c = 0; c < dof; ++c, ++l) {
742:           ltog[l] = off+c;
743:         }
744:       }
745:       ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);
746:       PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);
747:     } else {
748:       if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
749:       (*dm->ops->getlocaltoglobalmapping)(dm);
750:     }
751:   }
752:   *ltog = dm->ltogmap;
753:   return(0);
754: }
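
/*
   A minimal sketch (illustrative), following the Notes above; it assumes a global Vec g and
   a Mat A that were created from this DM. Attaching the mapping lets VecSetValuesLocal()
   and MatSetValuesLocal() be used with local indices.

     ISLocalToGlobalMapping ltog;
     DMGetLocalToGlobalMapping(dm,&ltog);
     VecSetLocalToGlobalMapping(g,ltog);
     MatSetLocalToGlobalMapping(A,ltog,ltog);
*/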

758: /*@
759:    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

761:    Collective on DM

763:    Input Parameter:
764: .  dm - the DM that provides the mapping

766:    Output Parameter:
767: .  ltog - the block mapping

769:    Level: intermediate

771:    Notes:
772:    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
773:    MatSetLocalToGlobalMappingBlock().

775: .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
776: @*/
777: PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
778: {

784:   if (!dm->ltogmapb) {
785:     PetscInt bs;
786:     DMGetBlockSize(dm,&bs);
787:     if (bs > 1) {
788:       if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
789:       (*dm->ops->getlocaltoglobalmappingblock)(dm);
790:     } else {
791:       DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);
792:       PetscObjectReference((PetscObject)dm->ltogmapb);
793:     }
794:   }
795:   *ltog = dm->ltogmapb;
796:   return(0);
797: }

801: /*@
802:    DMGetBlockSize - Gets the inherent block size associated with a DM

804:    Not Collective

806:    Input Parameter:
807: .  dm - the DM with block structure

809:    Output Parameter:
810: .  bs - the block size, 1 implies no exploitable block structure

812:    Level: intermediate

814: .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
815: @*/
816: PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
817: {
821:   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
822:   *bs = dm->bs;
823:   return(0);
824: }

828: /*@
829:     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

831:     Collective on DM

833:    Input Parameters:
834: +   dm1 - the DM object
835: -   dm2 - the second, finer DM object

837:    Output Parameters:
838: +  mat - the interpolation
839: -  vec - the scaling (optional)

841:     Level: developer

843:     Notes:  For DMDA objects this only works for "uniform refinement", that is, the refined mesh was obtained with DMRefine() or the coarse mesh was obtained by
844:         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

846:         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
847:         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.


850: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()

852: @*/
853: PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
854: {

860:   (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);
861:   return(0);
862: }
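
/*
   A minimal sketch (illustrative; error checking omitted), assuming a coarse DM dmc, a fine
   DM dmf obtained from it with DMRefine(), and coarse/fine vectors xc and xf: build the
   interpolation and apply it with MatInterpolate().

     Mat Interp;
     Vec scale;
     DMCreateInterpolation(dmc,dmf,&Interp,&scale);
     MatInterpolate(Interp,xc,xf);
     MatDestroy(&Interp);
     VecDestroy(&scale);
*/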

866: /*@
867:     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

869:     Collective on DM

871:    Input Parameters:
872: +   dm1 - the DM object
873: -   dm2 - the second, finer DM object

875:     Output Parameter:
876: .   ctx - the injection

878:     Level: developer

880:    Notes:  For DMDA objects this only works for "uniform refinement", that is, the refined mesh was obtained with DMRefine() or the coarse mesh was obtained by
881:         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

883: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

885: @*/
886: PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
887: {

893:   (*dm1->ops->getinjection)(dm1,dm2,ctx);
894:   return(0);
895: }

899: /*@
900:     DMCreateColoring - Gets coloring for a DM

902:     Collective on DM

904:    Input Parameters:
905: +   dm - the DM object
906: -   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL

908:     Output Parameter:
909: .   coloring - the coloring

911:     Level: developer

913: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType()

915: @*/
916: PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring)
917: {

922:   if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
923:   (*dm->ops->getcoloring)(dm,ctype,coloring);
924:   return(0);
925: }
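
/*
   A minimal sketch (illustrative, assuming a set-up DM named dm): obtain a coloring
   consistent with the DM's matrix nonzero pattern, e.g. for finite-difference Jacobian
   approximation, and release it afterwards.

     ISColoring coloring;
     DMCreateColoring(dm,IS_COLORING_GLOBAL,&coloring);
     ... e.g. pass it to MatFDColoringCreate() ...
     ISColoringDestroy(&coloring);
*/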

929: /*@
930:     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite

932:     Collective on DM

934:     Input Parameter:
935: .   dm - the DM object

937:     Output Parameter:
938: .   mat - the empty Jacobian

940:     Level: beginner

942:     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
943:        do not need to do it yourself.

945:        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
946:        the nonzero pattern call DMSetMatrixPreallocateOnly()

948:        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
949:        internally by PETSc.

951:        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
952:        the indices for the global numbering for DMDAs which is complicated.

954: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType()

956: @*/
957: PetscErrorCode  DMCreateMatrix(DM dm,Mat *mat)
958: {

963:   MatInitializePackage();
966:   (*dm->ops->creatematrix)(dm,mat);
967:   return(0);
968: }
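
/*
   A minimal sketch (illustrative, assuming a set-up DM named dm; error checking omitted):
   the matrix comes back preallocated (and, unless DMSetMatrixPreallocateOnly() was used,
   with its nonzero structure already set), so values can be inserted immediately, e.g. with
   MatSetValuesStencil() for a DMDA or MatSetValuesLocal() in general.

     Mat J;
     DMCreateMatrix(dm,&J);
     ... insert values, MatAssemblyBegin()/MatAssemblyEnd() ...
     MatDestroy(&J);
*/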

972: /*@
973:   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
974:     preallocated but the nonzero structure and zero values will not be set.

976:   Logically Collective on DMDA

978:   Input Parameters:
979: + dm - the DM
980: - only - PETSC_TRUE if only want preallocation

982:   Level: developer
983: .seealso DMCreateMatrix()
984: @*/
985: PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
986: {
989:   dm->prealloc_only = only;
990:   return(0);
991: }

995: /*@C
996:   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

998:   Not Collective

1000:   Input Parameters:
1001: + dm - the DM object
1002: . count - The minimum size
1003: - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

1005:   Output Parameter:
1006: . mem - the work array

1008:   Level: developer

1010: .seealso DMDestroy(), DMCreate()
1011: @*/
1012: PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1013: {
1015:   DMWorkLink     link;
1016:   size_t         size;

1021:   if (dm->workin) {
1022:     link       = dm->workin;
1023:     dm->workin = dm->workin->next;
1024:   } else {
1025:     PetscNewLog(dm,&link);
1026:   }
1027:   PetscDataTypeGetSize(dtype,&size);
1028:   if (size*count > link->bytes) {
1029:     PetscFree(link->mem);
1030:     PetscMalloc(size*count,&link->mem);
1031:     link->bytes = size*count;
1032:   }
1033:   link->next   = dm->workout;
1034:   dm->workout  = link;
1035:   *(void**)mem = link->mem;
1036:   return(0);
1037: }

1041: /*@C
1042:   DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

1044:   Not Collective

1046:   Input Parameters:
1047: + dm - the DM object
1048: . count - The minimum size
1049: - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

1051:   Output Parameter:
1052: . mem - the work array (set to NULL on return)

1054:   Level: developer

1056: .seealso DMDestroy(), DMCreate()
1057: @*/
1058: PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1059: {
1060:   DMWorkLink *p,link;

1065:   for (p=&dm->workout; (link=*p); p=&link->next) {
1066:     if (link->mem == *(void**)mem) {
1067:       *p           = link->next;
1068:       link->next   = dm->workin;
1069:       dm->workin   = link;
1070:       *(void**)mem = NULL;
1071:       return(0);
1072:     }
1073:   }
1074:   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
1075:   return(0);
1076: }
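
/*
   A minimal sketch (illustrative, assuming a DM named dm and a size n): get a scratch array
   of at least n PetscScalars, use it, and return it with the same count/type arguments. The
   array is recycled from the DM's internal work list, so get and restore must be paired.

     PetscScalar *work;
     DMGetWorkArray(dm,n,PETSC_SCALAR,&work);
     ... use work[0..n-1] ...
     DMRestoreWorkArray(dm,n,PETSC_SCALAR,&work);
*/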

1080: PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1081: {
1084:   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1085:   dm->nullspaceConstructors[field] = nullsp;
1086:   return(0);
1087: }

1091: /*@C
1092:   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

1094:   Not collective

1096:   Input Parameter:
1097: . dm - the DM object

1099:   Output Parameters:
1100: + numFields  - The number of fields (or NULL if not requested)
1101: . fieldNames - The name for each field (or NULL if not requested)
1102: - fields     - The global indices for each field (or NULL if not requested)

1104:   Level: intermediate

1106:   Notes:
1107:   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1108:   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1109:   PetscFree().

1111: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1112: @*/
1113: PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1114: {
1115:   PetscSection   section, sectionGlobal;

1120:   if (numFields) {
1122:     *numFields = 0;
1123:   }
1124:   if (fieldNames) {
1126:     *fieldNames = NULL;
1127:   }
1128:   if (fields) {
1130:     *fields = NULL;
1131:   }
1132:   DMGetDefaultSection(dm, &section);
1133:   if (section) {
1134:     PetscInt *fieldSizes, **fieldIndices;
1135:     PetscInt nF, f, pStart, pEnd, p;

1137:     DMGetDefaultGlobalSection(dm, &sectionGlobal);
1138:     PetscSectionGetNumFields(section, &nF);
1139:     PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);
1140:     PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);
1141:     for (f = 0; f < nF; ++f) {
1142:       fieldSizes[f] = 0;
1143:     }
1144:     for (p = pStart; p < pEnd; ++p) {
1145:       PetscInt gdof;

1147:       PetscSectionGetDof(sectionGlobal, p, &gdof);
1148:       if (gdof > 0) {
1149:         for (f = 0; f < nF; ++f) {
1150:           PetscInt fdof, fcdof;

1152:           PetscSectionGetFieldDof(section, p, f, &fdof);
1153:           PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);
1154:           fieldSizes[f] += fdof-fcdof;
1155:         }
1156:       }
1157:     }
1158:     for (f = 0; f < nF; ++f) {
1159:       PetscMalloc1(fieldSizes[f], &fieldIndices[f]);
1160:       fieldSizes[f] = 0;
1161:     }
1162:     for (p = pStart; p < pEnd; ++p) {
1163:       PetscInt gdof, goff;

1165:       PetscSectionGetDof(sectionGlobal, p, &gdof);
1166:       if (gdof > 0) {
1167:         PetscSectionGetOffset(sectionGlobal, p, &goff);
1168:         for (f = 0; f < nF; ++f) {
1169:           PetscInt fdof, fcdof, fc;

1171:           PetscSectionGetFieldDof(section, p, f, &fdof);
1172:           PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);
1173:           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1174:             fieldIndices[f][fieldSizes[f]] = goff++;
1175:           }
1176:         }
1177:       }
1178:     }
1179:     if (numFields) *numFields = nF;
1180:     if (fieldNames) {
1181:       PetscMalloc1(nF, fieldNames);
1182:       for (f = 0; f < nF; ++f) {
1183:         const char *fieldName;

1185:         PetscSectionGetFieldName(section, f, &fieldName);
1186:         PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);
1187:       }
1188:     }
1189:     if (fields) {
1190:       PetscMalloc1(nF, fields);
1191:       for (f = 0; f < nF; ++f) {
1192:         ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);
1193:       }
1194:     }
1195:     PetscFree2(fieldSizes,fieldIndices);
1196:   } else if (dm->ops->createfieldis) {
1197:     (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);
1198:   }
1199:   return(0);
1200: }
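
/*
   A minimal sketch (illustrative, assuming a DM named dm; error checking omitted),
   following the Notes above on who frees what:

     PetscInt   nf,f;
     char     **names;
     IS        *fields;
     DMCreateFieldIS(dm,&nf,&names,&fields);
     for (f = 0; f < nf; ++f) {
       PetscFree(names[f]);
       ISDestroy(&fields[f]);
     }
     PetscFree(names);
     PetscFree(fields);
*/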


1205: /*@C
1206:   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1207:                           corresponding to different fields: each IS contains the global indices of the dofs of the
1208:                           corresponding field. The optional list of DMs define the DM for each subproblem.
1209:                           Generalizes DMCreateFieldIS().

1211:   Not collective

1213:   Input Parameter:
1214: . dm - the DM object

1216:   Output Parameters:
1217: + len       - The number of subproblems in the field decomposition (or NULL if not requested)
1218: . namelist  - The name for each field (or NULL if not requested)
1219: . islist    - The global indices for each field (or NULL if not requested)
1220: - dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

1222:   Level: intermediate

1224:   Notes:
1225:   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1226:   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1227:   and all of the arrays should be freed with PetscFree().

1229: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1230: @*/
1231: PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1232: {

1237:   if (len) {
1239:     *len = 0;
1240:   }
1241:   if (namelist) {
1243:     *namelist = 0;
1244:   }
1245:   if (islist) {
1247:     *islist = 0;
1248:   }
1249:   if (dmlist) {
1251:     *dmlist = 0;
1252:   }
1253:   /*
1254:    Is it a good idea to apply the following check across all impls?
1255:    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1256:    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1257:    */
1258:   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1259:   if (!dm->ops->createfielddecomposition) {
1260:     PetscSection section;
1261:     PetscInt     numFields, f;

1263:     DMGetDefaultSection(dm, &section);
1264:     if (section) {PetscSectionGetNumFields(section, &numFields);}
1265:     if (section && numFields && dm->ops->createsubdm) {
1266:       *len = numFields;
1267:       PetscMalloc3(numFields,namelist,numFields,islist,numFields,dmlist);
1268:       for (f = 0; f < numFields; ++f) {
1269:         const char *fieldName;

1271:         DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);
1272:         PetscSectionGetFieldName(section, f, &fieldName);
1273:         PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);
1274:       }
1275:     } else {
1276:       DMCreateFieldIS(dm, len, namelist, islist);
1277:       /* By default there are no DMs associated with subproblems. */
1278:       if (dmlist) *dmlist = NULL;
1279:     }
1280:   } else {
1281:     (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);
1282:   }
1283:   return(0);
1284: }

1288: /*@C
1289:   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1290:                   The fields are defined by DMCreateFieldIS().

1292:   Not collective

1294:   Input Parameters:
1295: + dm - the DM object
1296: . numFields - number of fields in this subproblem
1297: - fields    - The field numbers of the fields in this subproblem

1299:   Output Parameters:
1300: + is - The global indices for the subproblem
1301: - subdm - The DM for the subproblem

1303:   Level: intermediate

1305: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1306: @*/
1307: PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1308: {

1316:   if (dm->ops->createsubdm) {
1317:     (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);
1318:   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1319:   return(0);
1320: }


1325: /*@C
1326:   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1327:                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1328:                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1329:                           define a nonoverlapping covering, while outer subdomains can overlap.
1330:                           The optional list of DMs define the DM for each subproblem.

1332:   Not collective

1334:   Input Parameter:
1335: . dm - the DM object

1337:   Output Parameters:
1338: + len         - The number of subproblems in the domain decomposition (or NULL if not requested)
1339: . namelist    - The name for each subdomain (or NULL if not requested)
1340: . innerislist - The global indices for each inner subdomain (or NULL, if not requested)
1341: . outerislist - The global indices for each outer subdomain (or NULL, if not requested)
1342: - dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

1344:   Level: intermediate

1346:   Notes:
1347:   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1348:   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1349:   and all of the arrays should be freed with PetscFree().

1351: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1352: @*/
1353: PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1354: {
1355:   PetscErrorCode      ierr;
1356:   DMSubDomainHookLink link;
1357:   PetscInt            i,l;

1366:   /*
1367:    Is it a good idea to apply the following check across all impls?
1368:    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1369:    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1370:    */
1371:   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1372:   if (dm->ops->createdomaindecomposition) {
1373:     (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);
1374:     /* copy subdomain hooks and context over to the subdomain DMs */
1375:     if (dmlist) {
1376:       for (i = 0; i < l; i++) {
1377:         for (link=dm->subdomainhook; link; link=link->next) {
1378:           if (link->ddhook) {(*link->ddhook)(dm,(*dmlist)[i],link->ctx);}
1379:         }
1380:         (*dmlist)[i]->ctx = dm->ctx;
1381:       }
1382:     }
1383:     if (len) *len = l;
1384:   }
1385:   return(0);
1386: }


1391: /*@C
1392:   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector

1394:   Not collective

1396:   Input Parameters:
1397: + dm - the DM object
1398: . n  - the number of subdomain scatters
1399: - subdms - the local subdomains

1401:   Output Parameters:
1402: + n     - the number of scatters returned
1403: . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1404: . oscat - scatter from global vector to overlapping global vector entries on subdomain
1405: - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)

1407:   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1408:   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1409:   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1410:   solution and residual data.

1412:   Level: developer

1414: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1415: @*/
1416: PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1417: {

1423:   if (dm->ops->createddscatters) {
1424:     (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);
1425:   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1426:   return(0);
1427: }

1431: /*@
1432:   DMRefine - Refines a DM object

1434:   Collective on DM

1436:  Input Parameters:
1437: + dm   - the DM object
1438: - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

1440:   Output Parameter:
1441: . dmf - the refined DM, or NULL

1443:   Note: If no refinement was done, the return value is NULL

1445:   Level: developer

1447: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1448: @*/
1449: PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1450: {
1451:   PetscErrorCode   ierr;
1452:   DMRefineHookLink link;

1456:   (*dm->ops->refine)(dm,comm,dmf);
1457:   if (*dmf) {
1458:     (*dmf)->ops->creatematrix = dm->ops->creatematrix;

1460:     PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);

1462:     (*dmf)->ctx       = dm->ctx;
1463:     (*dmf)->leveldown = dm->leveldown;
1464:     (*dmf)->levelup   = dm->levelup + 1;

1466:     DMSetMatType(*dmf,dm->mattype);
1467:     for (link=dm->refinehook; link; link=link->next) {
1468:       if (link->refinehook) {
1469:         (*link->refinehook)(dm,*dmf,link->ctx);
1470:       }
1471:     }
1472:   }
1473:   return(0);
1474: }

1478: /*@C
1479:    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid

1481:    Logically Collective

1483:    Input Arguments:
1484: +  coarse - DM on which to run a hook when interpolating to a finer level
1485: .  refinehook - function to run when setting up the finer level
1486: .  interphook - function to run to update data on finer levels (once per SNESSolve())
1487: -  ctx - [optional] user-defined context for providing data to the hooks (may be NULL)

1489:    Calling sequence of refinehook:
1490: $    refinehook(DM coarse,DM fine,void *ctx);

1492: +  coarse - coarse level DM
1493: .  fine - fine level DM to interpolate problem to
1494: -  ctx - optional user-defined function context

1496:    Calling sequence for interphook:
1497: $    interphook(DM coarse,Mat interp,DM fine,void *ctx)

1499: +  coarse - coarse level DM
1500: .  interp - matrix interpolating a coarse-level solution to the finer grid
1501: .  fine - fine level DM to update
1502: -  ctx - optional user-defined function context

1504:    Level: advanced

1506:    Notes:
1507:    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

1509:    If this function is called multiple times, the hooks will be run in the order they are added.

1511:    This function is currently not available from Fortran.

1513: .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1514: @*/
1515: PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1516: {
1517:   PetscErrorCode   ierr;
1518:   DMRefineHookLink link,*p;

1522:   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1523:   PetscMalloc(sizeof(struct _DMRefineHookLink),&link);
1524:   link->refinehook = refinehook;
1525:   link->interphook = interphook;
1526:   link->ctx        = ctx;
1527:   link->next       = NULL;
1528:   *p               = link;
1529:   return(0);
1530: }
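
/*
   A minimal sketch (illustrative): register hooks on an assumed coarse DM dmc so that user
   data attached to it is carried to finer levels. MyRefine and MyInterp are hypothetical
   user callbacks matching the calling sequences documented above.

     static PetscErrorCode MyRefine(DM coarse,DM fine,void *ctx)            { return 0; }
     static PetscErrorCode MyInterp(DM coarse,Mat interp,DM fine,void *ctx) { return 0; }

     DMRefineHookAdd(dmc,MyRefine,MyInterp,NULL);
*/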

1534: /*@
1535:    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()

1537:    Collective if any hooks are

1539:    Input Arguments:
1540: +  coarse - coarser DM to use as a base
1541: .  interp - interpolation matrix, apply using MatInterpolate()
1542: -  fine - finer DM to update

1544:    Level: developer

1546: .seealso: DMRefineHookAdd(), MatInterpolate()
1547: @*/
1548: PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1549: {
1550:   PetscErrorCode   ierr;
1551:   DMRefineHookLink link;

1554:   for (link=fine->refinehook; link; link=link->next) {
1555:     if (link->interphook) {
1556:       (*link->interphook)(coarse,interp,fine,link->ctx);
1557:     }
1558:   }
1559:   return(0);
1560: }

1564: /*@
1565:    DMGetRefineLevel - Gets the number of refinements that have generated this DM.

1567:     Not Collective

1569:     Input Parameter:
1570: .   dm - the DM object

1572:     Output Parameter:
1573: .   level - number of refinements

1575:     Level: developer

1577: .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

1579: @*/
1580: PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1581: {
1584:   *level = dm->levelup;
1585:   return(0);
1586: }

1590: /*@C
1591:    DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

1593:    Logically Collective

1595:    Input Arguments:
1596: +  dm - the DM
1597: .  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
1598: .  endhook - function to run after DMGlobalToLocalEnd() has completed
1599: -  ctx - [optional] user-defined context for providing data to the hooks (may be NULL)

1601:    Calling sequence for beginhook:
1602: $    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

1604: +  dm - global DM
1605: .  g - global vector
1606: .  mode - the insertion mode (INSERT_VALUES or ADD_VALUES)
1607: .  l - local vector
1608: -  ctx - optional user-defined function context


1611:    Calling sequence for endhook:
1612: $    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

1614: +  dm - global DM
1615: -  ctx - optional user-defined function context

1617:    Level: advanced

1619: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1620: @*/
1621: PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
1622: {
1623:   PetscErrorCode          ierr;
1624:   DMGlobalToLocalHookLink link,*p;

1628:   for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1629:   PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);
1630:   link->beginhook = beginhook;
1631:   link->endhook   = endhook;
1632:   link->ctx       = ctx;
1633:   link->next      = NULL;
1634:   *p              = link;
1635:   return(0);
1636: }

1640: /*@
1641:     DMGlobalToLocalBegin - Begins updating local vectors from global vector

1643:     Neighbor-wise Collective on DM

1645:     Input Parameters:
1646: +   dm - the DM object
1647: .   g - the global vector
1648: .   mode - INSERT_VALUES or ADD_VALUES
1649: -   l - the local vector


1652:     Level: beginner

1654: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

1656: @*/
1657: PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1658: {
1659:   PetscSF                 sf;
1660:   PetscErrorCode          ierr;
1661:   DMGlobalToLocalHookLink link;

1665:   for (link=dm->gtolhook; link; link=link->next) {
1666:     if (link->beginhook) {
1667:       (*link->beginhook)(dm,g,mode,l,link->ctx);
1668:     }
1669:   }
1670:   DMGetDefaultSF(dm, &sf);
1671:   if (sf) {
1672:     PetscScalar *lArray, *gArray;

1674:     if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1675:     VecGetArray(l, &lArray);
1676:     VecGetArray(g, &gArray);
1677:     PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);
1678:     VecRestoreArray(l, &lArray);
1679:     VecRestoreArray(g, &gArray);
1680:   } else {
1681:     (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
1682:   }
1683:   return(0);
1684: }

1688: /*@
1689:     DMGlobalToLocalEnd - Ends updating local vectors from global vector

1691:     Neighbor-wise Collective on DM

1693:     Input Parameters:
1694: +   dm - the DM object
1695: .   g - the global vector
1696: .   mode - INSERT_VALUES or ADD_VALUES
1697: -   l - the local vector


1700:     Level: beginner

1702: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

1704: @*/
1705: PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1706: {
1707:   PetscSF                 sf;
1708:   PetscErrorCode          ierr;
1709:   PetscScalar             *lArray, *gArray;
1710:   DMGlobalToLocalHookLink link;

1714:   DMGetDefaultSF(dm, &sf);
1715:   if (sf) {
1716:     if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

1718:     VecGetArray(l, &lArray);
1719:     VecGetArray(g, &gArray);
1720:     PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);
1721:     VecRestoreArray(l, &lArray);
1722:     VecRestoreArray(g, &gArray);
1723:   } else {
1724:     (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
1725:   }
1726:   for (link=dm->gtolhook; link; link=link->next) {
1727:     if (link->endhook) {(*link->endhook)(dm,g,mode,l,link->ctx);}
1728:   }
1729:   return(0);
1730: }
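
/*
   A minimal sketch (illustrative; error checking omitted), assuming a DM dm with a global
   vector g and a local vector l created from it: scatter the global vector into the local
   (ghosted) vector. The Begin/End pair allows communication to overlap with other work.

     DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);
     ... independent computation may go here ...
     DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);
*/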

1734: /*@
1735:     DMLocalToGlobalBegin - updates global vectors from local vectors

1737:     Neighbor-wise Collective on DM

1739:     Input Parameters:
1740: +   dm - the DM object
1741: .   l - the local vector
1742: .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1743:            base point.
1744: -   g - the global vector

1746:     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1747:            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1748:            global array to the final global array with VecAXPY().

1750:     Level: beginner

1752: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

1754: @*/
1755: PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1756: {
1757:   PetscSF        sf;

1762:   DMGetDefaultSF(dm, &sf);
1763:   if (sf) {
1764:     MPI_Op      op;
1765:     PetscScalar *lArray, *gArray;

1767:     switch (mode) {
1768:     case INSERT_VALUES:
1769:     case INSERT_ALL_VALUES:
1770:       op = MPIU_REPLACE; break;
1771:     case ADD_VALUES:
1772:     case ADD_ALL_VALUES:
1773:       op = MPI_SUM; break;
1774:     default:
1775:       SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1776:     }
1777:     VecGetArray(l, &lArray);
1778:     VecGetArray(g, &gArray);
1779:     PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);
1780:     VecRestoreArray(l, &lArray);
1781:     VecRestoreArray(g, &gArray);
1782:   } else {
1783:     (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);
1784:   }
1785:   return(0);
1786: }

1790: /*@
1791:     DMLocalToGlobalEnd - updates global vectors from local vectors

1793:     Neighbor-wise Collective on DM

1795:     Input Parameters:
1796: +   dm - the DM object
1797: .   l - the local vector
1798: .   mode - INSERT_VALUES or ADD_VALUES
1799: -   g - the global vector


1802:     Level: beginner

1804: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()

1806: @*/
1807: PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1808: {
1809:   PetscSF        sf;

1814:   DMGetDefaultSF(dm, &sf);
1815:   if (sf) {
1816:     MPI_Op      op;
1817:     PetscScalar *lArray, *gArray;

1819:     switch (mode) {
1820:     case INSERT_VALUES:
1821:     case INSERT_ALL_VALUES:
1822:       op = MPIU_REPLACE; break;
1823:     case ADD_VALUES:
1824:     case ADD_ALL_VALUES:
1825:       op = MPI_SUM; break;
1826:     default:
1827:       SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1828:     }
1829:     VecGetArray(l, &lArray);
1830:     VecGetArray(g, &gArray);
1831:     PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);
1832:     VecRestoreArray(l, &lArray);
1833:     VecRestoreArray(g, &gArray);
1834:   } else {
1835:     (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);
1836:   }
1837:   return(0);
1838: }
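
/*
   A minimal sketch (illustrative), following the Notes for DMLocalToGlobalBegin() and
   assuming a DM dm with vectors l and g created from it: when accumulating contributions
   with ADD_VALUES, the receiving global vector is normally zeroed first.

     VecSet(g,0.0);
     DMLocalToGlobalBegin(dm,l,ADD_VALUES,g);
     DMLocalToGlobalEnd(dm,l,ADD_VALUES,g);
*/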

1842: /*@
1843:    DMLocalToLocalBegin - Maps from a local vector (including ghost points
1844:    that contain irrelevant values) to another local vector where the ghost
1845:    points in the second are set correctly. Must be followed by DMLocalToLocalEnd().

1847:    Neighbor-wise Collective on DM and Vec

1849:    Input Parameters:
1850: +  dm - the DM object
1851: .  g - the original local vector
1852: -  mode - one of INSERT_VALUES or ADD_VALUES

1854:    Output Parameter:
1855: .  l  - the local vector with correct ghost values

1857:    Level: intermediate

1859:    Notes:
1860:    The local vectors used here need not be the same as those
1861:    obtained from DMCreateLocalVector(), BUT they
1862:    must have the same parallel data layout; they could, for example, be
1863:    obtained with VecDuplicate() from the DM originating vectors.

1865: .keywords: DM, local-to-local, begin
1866: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

1868: @*/
1869: PetscErrorCode  DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1870: {
1871:   PetscErrorCode          ierr;

1875:   (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
1876:   return(0);
1877: }

1881: /*@
1882:    DMLocalToLocalEnd - Maps from a local vector (including ghost points
1883:    that contain irrelevant values) to another local vector where the ghost
1884:    points in the second are set correctly. Must be preceded by DMLocalToLocalBegin().

1886:    Neighbor-wise Collective on DM and Vec

1888:    Input Parameters:
1889: +  dm - the DM object
1890: .  g - the original local vector
1891: -  mode - one of INSERT_VALUES or ADD_VALUES

1893:    Output Parameter:
1894: .  l  - the local vector with correct ghost values

1896:    Level: intermediate

1898:    Notes:
1899:    The local vectors used here need not be the same as those
1900:    obtained from DMCreateLocalVector(), BUT they
1901:    must have the same parallel data layout; they could, for example, be
1902:    obtained with VecDuplicate() from the DM originating vectors.

1904: .keywords: DM, local-to-local, end
1905: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

1907: @*/
1908: PetscErrorCode  DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1909: {
1910:   PetscErrorCode          ierr;

1914:   (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
1915:   return(0);
1916: }


1921: /*@
1922:     DMCoarsen - Coarsens a DM object

1924:     Collective on DM

1926:    Input Parameters:
1927: +   dm - the DM object
1928: -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

1930:     Output Parameter:
1931: .   dmc - the coarsened DM

1933:     Level: developer

1935: .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

1937: @*/
1938: PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1939: {
1940:   PetscErrorCode    ierr;
1941:   DMCoarsenHookLink link;

1945:   (*dm->ops->coarsen)(dm, comm, dmc);
1946:   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1947:   PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);
1948:   (*dmc)->ctx               = dm->ctx;
1949:   (*dmc)->levelup           = dm->levelup;
1950:   (*dmc)->leveldown         = dm->leveldown + 1;
1951:   DMSetMatType(*dmc,dm->mattype);
1952:   for (link=dm->coarsenhook; link; link=link->next) {
1953:     if (link->coarsenhook) {(*link->coarsenhook)(dm,*dmc,link->ctx);}
1954:   }
1955:   return(0);
1956: }
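
/*
   Editorial usage sketch (not part of the PETSc source): coarsening a DM once and querying how
   many coarsenings produced the result with DMGetCoarsenLevel(). MPI_COMM_NULL is passed so the
   coarse DM lives on the same communicator, as the documentation above allows. Error checking is
   elided in the style of this listing; "dm" and the helper name are illustrative.
*/
static PetscErrorCode ExampleCoarsenOnce(DM dm)
{
  DM       dmc;
  PetscInt level;

  DMCoarsen(dm,MPI_COMM_NULL,&dmc);
  DMGetCoarsenLevel(dmc,&level);    /* one more than the coarsen level of dm */
  PetscPrintf(PetscObjectComm((PetscObject)dmc),"coarsened %D time(s)\n",level);
  DMDestroy(&dmc);
  return(0);
}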

1960: /*@C
1961:    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid

1963:    Logically Collective

1965:    Input Arguments:
1966: +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1967: .  coarsenhook - function to run when setting up a coarser level
1968: .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1969: -  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)

1971:    Calling sequence of coarsenhook:
1972: $    coarsenhook(DM fine,DM coarse,void *ctx);

1974: +  fine - fine level DM
1975: .  coarse - coarse level DM to restrict problem to
1976: -  ctx - optional user-defined function context

1978:    Calling sequence for restricthook:
1979: $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)

1981: +  fine - fine level DM
1982: .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1983: .  rscale - scaling vector for restriction
1984: .  inject - matrix restricting by injection
1985: .  coarse - coarse level DM to update
1986: -  ctx - optional user-defined function context

1988:    Level: advanced

1990:    Notes:
1991:    This function is only needed if auxiliary data needs to be set up on coarse grids.

1993:    If this function is called multiple times, the hooks will be run in the order they are added.

1995:    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1996:    extract the finest level information from its context (instead of from the SNES).

1998:    This function is currently not available from Fortran.

2000: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2001: @*/
2002: PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
2003: {
2004:   PetscErrorCode    ierr;
2005:   DMCoarsenHookLink link,*p;

2009:   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2010:   PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);
2011:   link->coarsenhook  = coarsenhook;
2012:   link->restricthook = restricthook;
2013:   link->ctx          = ctx;
2014:   link->next         = NULL;
2015:   *p                 = link;
2016:   return(0);
2017: }
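
/*
   Editorial usage sketch (not part of the PETSc source): hook functions matching the calling
   sequences documented above, and their registration with DMCoarsenHookAdd(). The context type
   "UserCtx", its field, and the helper names are illustrative; error checking is elided as in
   this listing.
*/
typedef struct {
  Vec finedata;   /* auxiliary data that lives on the fine grid */
} UserCtx;

static PetscErrorCode UserCoarsenHook(DM fine,DM coarse,void *ctx)
{
  /* called once when the coarse DM is created; set up coarse-level auxiliary storage here */
  return(0);
}

static PetscErrorCode UserRestrictHook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
{
  /* called once per solve; restrict ((UserCtx*)ctx)->finedata to the coarse grid here */
  return(0);
}

static PetscErrorCode ExampleRegisterCoarsenHooks(DM dm,UserCtx *user)
{
  DMCoarsenHookAdd(dm,UserCoarsenHook,UserRestrictHook,user);
  return(0);
}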

2021: /*@
2022:    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()

2024:    Collective if any hooks are

2026:    Input Arguments:
2027: +  fine - finer DM to use as a base
2028: .  restrct - restriction matrix, apply using MatRestrict()
2029: .  rscale - scaling vector for restriction
2030: .  inject - injection matrix, also applied using MatRestrict()
2031: -  coarse - coarser DM to update

2032:    Level: developer

2034: .seealso: DMCoarsenHookAdd(), MatRestrict()
2035: @*/
2036: PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
2037: {
2038:   PetscErrorCode    ierr;
2039:   DMCoarsenHookLink link;

2042:   for (link=fine->coarsenhook; link; link=link->next) {
2043:     if (link->restricthook) {
2044:       (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);
2045:     }
2046:   }
2047:   return(0);
2048: }

2052: /*@C
2053:    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to subdomain DMs

2055:    Logically Collective

2057:    Input Arguments:
2058: +  global - global DM
2059: .  ddhook - function to run to pass data to the decomposition DM upon its creation
2060: .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
2061: -  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)


2064:    Calling sequence for ddhook:
2065: $    ddhook(DM global,DM block,void *ctx)

2067: +  global - global DM
2068: .  block  - block DM
2069: -  ctx - optional user-defined function context

2071:    Calling sequence for restricthook:
2072: $    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)

2074: +  global - global DM
2075: .  out    - scatter to the outer (with ghost and overlap points) block vector
2076: .  in     - scatter to block vector values only owned locally
2077: .  block  - block DM
2078: -  ctx - optional user-defined function context

2080:    Level: advanced

2082:    Notes:
2083:    This function is only needed if auxiliary data needs to be set up on subdomain DMs.

2085:    If this function is called multiple times, the hooks will be run in the order they are added.

2087:    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2088:    extract the global information from its context (instead of from the SNES).

2090:    This function is currently not available from Fortran.

2092: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2093: @*/
2094: PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2095: {
2096:   PetscErrorCode      ierr;
2097:   DMSubDomainHookLink link,*p;

2101:   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2102:   PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);
2103:   link->restricthook = restricthook;
2104:   link->ddhook       = ddhook;
2105:   link->ctx          = ctx;
2106:   link->next         = NULL;
2107:   *p                 = link;
2108:   return(0);
2109: }
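
/*
   Editorial usage sketch (not part of the PETSc source): skeleton hooks matching the calling
   sequences documented above, registered with DMSubDomainHookAdd(). The bodies are intentionally
   empty placeholders and the helper names are illustrative; error checking is elided as in this
   listing.
*/
static PetscErrorCode UserDDHook(DM global,DM block,void *ctx)
{
  /* called when the subdomain (block) DM is created; pass data to it here */
  return(0);
}

static PetscErrorCode UserSubDomainRestrictHook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)
{
  /* called at the start of each block solve; move auxiliary data with the provided scatters */
  return(0);
}

static PetscErrorCode ExampleRegisterSubDomainHooks(DM dm,void *userctx)
{
  DMSubDomainHookAdd(dm,UserDDHook,UserSubDomainRestrictHook,userctx);
  return(0);
}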

2113: /*@
2114:    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()

2116:    Collective if any hooks are

2118:    Input Arguments:
2119: +  global - global DM to use as a base
2120: .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2121: .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2122: -  subdm - subdomain DM to update

2124:    Level: developer

2126: .seealso: DMCoarsenHookAdd(), MatRestrict()
2127: @*/
2128: PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2129: {
2130:   PetscErrorCode      ierr;
2131:   DMSubDomainHookLink link;

2134:   for (link=global->subdomainhook; link; link=link->next) {
2135:     if (link->restricthook) {
2136:       (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);
2137:     }
2138:   }
2139:   return(0);
2140: }

2144: /*@
2145:     DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.

2147:     Not Collective

2149:     Input Parameter:
2150: .   dm - the DM object

2152:     Output Parameter:
2153: .   level - number of coarsenings

2155:     Level: developer

2157: .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

2159: @*/
2160: PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2161: {
2164:   *level = dm->leveldown;
2165:   return(0);
2166: }



2172: /*@C
2173:     DMRefineHierarchy - Refines a DM object, all levels at once

2175:     Collective on DM

2177:     Input Parameters:
2178: +   dm - the DM object
2179: -   nlevels - the number of levels of refinement

2181:     Output Parameter:
2182: .   dmf - the refined DM hierarchy

2184:     Level: developer

2186: .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

2188: @*/
2189: PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2190: {

2195:   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2196:   if (nlevels == 0) return(0);
2197:   if (dm->ops->refinehierarchy) {
2198:     (*dm->ops->refinehierarchy)(dm,nlevels,dmf);
2199:   } else if (dm->ops->refine) {
2200:     PetscInt i;

2202:     DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);
2203:     for (i=1; i<nlevels; i++) {
2204:       DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);
2205:     }
2206:   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2207:   return(0);
2208: }

2212: /*@C
2213:     DMCoarsenHierarchy - Coarsens a DM object, all levels at once

2215:     Collective on DM

2217:     Input Parameters:
2218: +   dm - the DM object
2219: -   nlevels - the number of levels of coarsening

2221:     Output Parameter:
2222: .   dmc - the coarsened DM hierarchy

2224:     Level: developer

2226: .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

2228: @*/
2229: PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2230: {

2235:   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2236:   if (nlevels == 0) return(0);
2238:   if (dm->ops->coarsenhierarchy) {
2239:     (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);
2240:   } else if (dm->ops->coarsen) {
2241:     PetscInt i;

2243:     DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);
2244:     for (i=1; i<nlevels; i++) {
2245:       DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);
2246:     }
2247:   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2248:   return(0);
2249: }
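
/*
   Editorial usage sketch (not part of the PETSc source): building and releasing a small refinement
   hierarchy; DMCoarsenHierarchy() is used the same way, with the coarsened DMs returned in the
   array. Error checking is elided as in this listing and the helper name is illustrative.
*/
static PetscErrorCode ExampleRefineHierarchy(DM dm)
{
  DM       dmf[3];
  PetscInt i,nlevels = 3;

  DMRefineHierarchy(dm,nlevels,dmf);   /* dmf[0] is one refinement of dm, dmf[nlevels-1] the finest */
  for (i = 0; i < nlevels; i++) {
    DMDestroy(&dmf[i]);
  }
  return(0);
}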

2253: /*@
2254:    DMCreateAggregates - Gets the aggregates that map between
2255:    grids associated with two DMs.

2257:    Collective on DM

2259:    Input Parameters:
2260: +  dmc - the coarse grid DM
2261: -  dmf - the fine grid DM

2263:    Output Parameters:
2264: .  rest - the restriction matrix (transpose of the projection matrix)

2266:    Level: intermediate

2268: .keywords: interpolation, restriction, multigrid

2270: .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2271: @*/
2272: PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2273: {

2279:   (*dmc->ops->getaggregates)(dmc, dmf, rest);
2280:   return(0);
2281: }

2285: /*@C
2286:     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

2288:     Not Collective

2290:     Input Parameters:
2291: +   dm - the DM object
2292: -   destroy - the destroy function

2294:     Level: intermediate

2296: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

2298: @*/
2299: PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
2300: {
2303:   dm->ctxdestroy = destroy;
2304:   return(0);
2305: }

2309: /*@
2310:     DMSetApplicationContext - Set a user context into a DM object

2312:     Not Collective

2314:     Input Parameters:
2315: +   dm - the DM object
2316: -   ctx - the user context

2318:     Level: intermediate

2320: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

2322: @*/
2323: PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
2324: {
2327:   dm->ctx = ctx;
2328:   return(0);
2329: }

2333: /*@
2334:     DMGetApplicationContext - Gets a user context from a DM object

2336:     Not Collective

2338:     Input Parameter:
2339: .   dm - the DM object

2341:     Output Parameter:
2342: .   ctx - the user context

2344:     Level: intermediate

2346: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

2348: @*/
2349: PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2350: {
2353:   *(void**)ctx = dm->ctx;
2354:   return(0);
2355: }
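
/*
   Editorial usage sketch (not part of the PETSc source): attaching a heap-allocated user context to
   a DM, registering a destroy routine so it is freed when the DM is destroyed, and retrieving it
   later. The struct "AppCtx", its field, and the helper names are illustrative; error checking is
   elided as in this listing.
*/
typedef struct {
  PetscReal diffusivity;
} AppCtx;

static PetscErrorCode AppCtxDestroy(void **ctx)
{
  PetscFree(*ctx);
  return(0);
}

static PetscErrorCode ExampleAttachContext(DM dm)
{
  AppCtx *user,*check;

  PetscMalloc1(1,&user);
  user->diffusivity = 1.0;
  DMSetApplicationContext(dm,user);
  DMSetApplicationContextDestroy(dm,AppCtxDestroy);
  DMGetApplicationContext(dm,&check);   /* check now points to the same AppCtx */
  return(0);
}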

2359: /*@C
2360:     DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.

2362:     Logically Collective on DM

2364:     Input Parameters:
2365: +   dm - the DM object
2366: -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)

2368:     Level: intermediate

2370: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2371:          DMSetJacobian()

2373: @*/
2374: PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2375: {
2377:   dm->ops->computevariablebounds = f;
2378:   return(0);
2379: }

2383: /*@
2384:     DMHasVariableBounds - does the DM object have a variable bounds function?

2386:     Not Collective

2388:     Input Parameter:
2389: .   dm - the DM object

2391:     Output Parameter:
2392: .   flg - PETSC_TRUE if the variable bounds function exists

2394:     Level: developer

2396: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

2398: @*/
2399: PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2400: {
2402:   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2403:   return(0);
2404: }

2408: /*@C
2409:     DMComputeVariableBounds - compute variable bounds used by SNESVI.

2411:     Logically Collective on DM

2413:     Input Parameter:
2414: .   dm - the DM object

2417:     Output Parameters:
2418: +   xl - lower bound
2419: -   xu - upper bound

2421:     Level: intermediate

2423: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

2425: @*/
2426: PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2427: {

2433:   if (dm->ops->computevariablebounds) {
2434:     (*dm->ops->computevariablebounds)(dm, xl,xu);
2435:   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2436:   return(0);
2437: }
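
/*
   Editorial usage sketch (not part of the PETSc source): a bounds callback matching the (DM,Vec,Vec)
   signature expected by DMSetVariableBounds(), registered and then invoked through
   DMComputeVariableBounds(). The constant box bounds and helper names are illustrative; error
   checking is elided as in this listing.
*/
static PetscErrorCode UserComputeBounds(DM dm,Vec xl,Vec xu)
{
  VecSet(xl,0.0);   /* lower bound of 0 on every variable */
  VecSet(xu,1.0);   /* upper bound of 1 on every variable */
  return(0);
}

static PetscErrorCode ExampleUseBounds(DM dm)
{
  Vec       xl,xu;
  PetscBool has;

  DMSetVariableBounds(dm,UserComputeBounds);
  DMHasVariableBounds(dm,&has);          /* has is now PETSC_TRUE */
  DMCreateGlobalVector(dm,&xl);
  VecDuplicate(xl,&xu);
  DMComputeVariableBounds(dm,xl,xu);
  VecDestroy(&xl);
  VecDestroy(&xu);
  return(0);
}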

2441: /*@
2442:     DMHasColoring - does the DM object have a method of providing a coloring?

2444:     Not Collective

2446:     Input Parameter:
2447: .   dm - the DM object

2449:     Output Parameter:
2450: .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().

2452:     Level: developer

2454: .seealso DMHasFunction(), DMCreateColoring()

2456: @*/
2457: PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2458: {
2460:   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2461:   return(0);
2462: }

2466: /*@C
2467:     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.

2469:     Collective on DM

2471:     Input Parameters:
2472: +   dm - the DM object
2473: -   x - the location at which to compute the residual, Jacobian, and VI bounds when NULL is passed to those routines; will be NULL for linear problems

2475:     Level: developer

2477: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

2479: @*/
2480: PetscErrorCode  DMSetVec(DM dm,Vec x)
2481: {

2485:   if (x) {
2486:     if (!dm->x) {
2487:       DMCreateGlobalVector(dm,&dm->x);
2488:     }
2489:     VecCopy(x,dm->x);
2490:   } else if (dm->x) {
2491:     VecDestroy(&dm->x);
2492:   }
2493:   return(0);
2494: }

2496: PetscFunctionList DMList              = NULL;
2497: PetscBool         DMRegisterAllCalled = PETSC_FALSE;

2501: /*@C
2502:   DMSetType - Builds a DM, for a particular DM implementation.

2504:   Collective on DM

2506:   Input Parameters:
2507: + dm     - The DM object
2508: - method - The name of the DM type

2510:   Options Database Key:
2511: . -dm_type <type> - Sets the DM type; use -help for a list of available types

2513:   Notes:
2514:   See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMPLEX, or DMCOMPOSITE).

2516:   Level: intermediate

2518: .keywords: DM, set, type
2519: .seealso: DMGetType(), DMCreate()
2520: @*/
2521: PetscErrorCode  DMSetType(DM dm, DMType method)
2522: {
2523:   PetscErrorCode (*r)(DM);
2524:   PetscBool      match;

2529:   PetscObjectTypeCompare((PetscObject) dm, method, &match);
2530:   if (match) return(0);

2532:   if (!DMRegisterAllCalled) {DMRegisterAll();}
2533:   PetscFunctionListFind(DMList,method,&r);
2534:   if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

2536:   if (dm->ops->destroy) {
2537:     (*dm->ops->destroy)(dm);
2538:     dm->ops->destroy = NULL;
2539:   }
2540:   (*r)(dm);
2541:   PetscObjectChangeTypeName((PetscObject)dm,method);
2542:   return(0);
2543: }

2547: /*@C
2548:   DMGetType - Gets the DM type name (as a string) from the DM.

2550:   Not Collective

2552:   Input Parameter:
2553: . dm  - The DM

2555:   Output Parameter:
2556: . type - The DM type name

2558:   Level: intermediate

2560: .keywords: DM, get, type, name
2561: .seealso: DMSetType(), DMCreate()
2562: @*/
2563: PetscErrorCode  DMGetType(DM dm, DMType *type)
2564: {

2570:   if (!DMRegisterAllCalled) {
2571:     DMRegisterAll();
2572:   }
2573:   *type = ((PetscObject)dm)->type_name;
2574:   return(0);
2575: }
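
/*
   Editorial usage sketch (not part of the PETSc source): creating an empty DM, choosing its
   implementation with DMSetType(), and reading the type name back with DMGetType(). DMSHELL is
   used only as a convenient built-in type and the helper name is illustrative; error checking is
   elided as in this listing.
*/
static PetscErrorCode ExampleSetType(MPI_Comm comm)
{
  DM     dm;
  DMType type;

  DMCreate(comm,&dm);
  DMSetType(dm,DMSHELL);
  DMGetType(dm,&type);
  PetscPrintf(comm,"DM type is %s\n",type);
  DMDestroy(&dm);
  return(0);
}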

2579: /*@C
2580:   DMConvert - Converts a DM to another DM, either of the same or different type.

2582:   Collective on DM

2584:   Input Parameters:
2585: + dm - the DM
2586: - newtype - new DM type (use "same" for the same type)

2588:   Output Parameter:
2589: . M - pointer to new DM

2591:   Notes:
2592:   Cannot be used to convert a sequential DM to parallel or parallel to sequential;
2593:   the MPI communicator of the generated DM is always the same as the communicator
2594:   of the input DM.

2596:   Level: intermediate

2598: .seealso: DMCreate()
2599: @*/
2600: PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
2601: {
2602:   DM             B;
2603:   char           convname[256];
2604:   PetscBool      sametype, issame;

2611:   PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);
2612:   PetscStrcmp(newtype, "same", &issame);
2613:   {
2614:     PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

2616:     /*
2617:        Order of precedence:
2618:        1) See if a specialized converter is known to the current DM.
2619:        2) See if a specialized converter is known to the desired DM class.
2620:        3) See if a good general converter is registered for the desired class
2621:        4) See if a good general converter is known for the current DM.
2622:        5) Use a really basic converter.
2623:     */

2625:     /* 1) See if a specialized converter is known to the current DM and the desired class */
2626:     PetscStrcpy(convname,"DMConvert_");
2627:     PetscStrcat(convname,((PetscObject) dm)->type_name);
2628:     PetscStrcat(convname,"_");
2629:     PetscStrcat(convname,newtype);
2630:     PetscStrcat(convname,"_C");
2631:     PetscObjectQueryFunction((PetscObject)dm,convname,&conv);
2632:     if (conv) goto foundconv;

2634:     /* 2)  See if a specialized converter is known to the desired DM class. */
2635:     DMCreate(PetscObjectComm((PetscObject)dm), &B);
2636:     DMSetType(B, newtype);
2637:     PetscStrcpy(convname,"DMConvert_");
2638:     PetscStrcat(convname,((PetscObject) dm)->type_name);
2639:     PetscStrcat(convname,"_");
2640:     PetscStrcat(convname,newtype);
2641:     PetscStrcat(convname,"_C");
2642:     PetscObjectQueryFunction((PetscObject)B,convname,&conv);
2643:     if (conv) {
2644:       DMDestroy(&B);
2645:       goto foundconv;
2646:     }

2648: #if 0
2649:     /* 3) See if a good general converter is registered for the desired class */
2650:     conv = B->ops->convertfrom;
2651:     DMDestroy(&B);
2652:     if (conv) goto foundconv;

2654:     /* 4) See if a good general converter is known for the current DM */
2655:     if (dm->ops->convert) {
2656:       conv = dm->ops->convert;
2657:     }
2658:     if (conv) goto foundconv;
2659: #endif

2661:     /* 5) Use a really basic converter. */
2662:     SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

2664: foundconv:
2665:     PetscLogEventBegin(DM_Convert,dm,0,0,0);
2666:     (*conv)(dm,newtype,M);
2667:     PetscLogEventEnd(DM_Convert,dm,0,0,0);
2668:   }
2669:   PetscObjectStateIncrease((PetscObject) *M);
2670:   return(0);
2671: }
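
/*
   Editorial usage sketch (not part of the PETSc source): converting a DM to another type with
   DMConvert(). This only succeeds if a converter between the two types has been registered;
   otherwise the error raised above is produced. The helper name is illustrative and error checking
   is elided as in this listing.
*/
static PetscErrorCode ExampleConvert(DM dm,DMType newtype)
{
  DM dmNew;

  DMConvert(dm,newtype,&dmNew);   /* dmNew lives on the same communicator as dm */
  DMDestroy(&dmNew);
  return(0);
}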

2673: /*--------------------------------------------------------------------------------------------------------------------*/

2677: /*@C
2678:   DMRegister -  Adds a new DM component implementation

2680:   Not Collective

2682:   Input Parameters:
2683: + name        - The name of a new user-defined creation routine
2684: - create_func - The creation routine itself

2686:   Notes:
2687:   DMRegister() may be called multiple times to add several user-defined DMs


2690:   Sample usage:
2691: .vb
2692:     DMRegister("my_da", MyDMCreate);
2693: .ve

2695:   Then, your DM type can be chosen with the procedural interface via
2696: .vb
2697:     DMCreate(MPI_Comm, DM *);
2698:     DMSetType(DM,"my_da");
2699: .ve
2700:    or at runtime via the option
2701: .vb
2702:     -dm_type my_da
2703: .ve

2705:   Level: advanced

2707: .keywords: DM, register
2708: .seealso: DMRegisterAll(), DMRegisterDestroy()

2710: @*/
2711: PetscErrorCode  DMRegister(const char sname[],PetscErrorCode (*function)(DM))
2712: {

2716:   PetscFunctionListAdd(&DMList,sname,function);
2717:   return(0);
2718: }

2722: /*@C
2723:   DMLoad - Loads a DM that has been stored in binary  with DMView().

2725:   Collective on PetscViewer

2727:   Input Parameters:
2728: + newdm - the newly loaded DM; it must have been created with DMCreate() or
2729:            some related function before a call to DMLoad().
2730: - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2731:            HDF5 file viewer, obtained from PetscViewerHDF5Open()

2733:    Level: intermediate

2735:   Notes:
2736:    The type is determined by the data in the file; any type set into the DM before this call is ignored.

2738:   Notes for advanced users:
2739:   Most users should not need to know the details of the binary storage
2740:   format, since DMLoad() and DMView() completely hide these details.
2741:   But for anyone who's interested, the standard binary DM storage
2742:   format is
2743: .vb
2744:      has not yet been determined
2745: .ve

2747: .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2748: @*/
2749: PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2750: {
2752:   PetscBool      isbinary;
2753:   PetscInt       classid;
2754:   char           type[256];

2759:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
2760:   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

2762:   PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);
2763:   if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid);
2764:   PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);
2765:   DMSetType(newdm, type);
2766:   if (newdm->ops->load) {
2767:     (*newdm->ops->load)(newdm,viewer);
2768:   }
2769:   return(0);
2770: }
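
/*
   Editorial usage sketch (not part of the PETSc source): loading a DM that was previously stored
   with DMView() on a binary viewer. The file name "dm.bin" and the helper name are hypothetical;
   error checking is elided as in this listing.
*/
static PetscErrorCode ExampleLoad(MPI_Comm comm,DM *dm)
{
  PetscViewer viewer;

  PetscViewerBinaryOpen(comm,"dm.bin",FILE_MODE_READ,&viewer);
  DMCreate(comm,dm);
  DMLoad(*dm,viewer);            /* the type stored in the file overrides any type set on *dm */
  PetscViewerDestroy(&viewer);
  return(0);
}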

2772: /******************************** FEM Support **********************************/

2776: PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2777: {
2778:   PetscInt       f;

2782:   PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);
2783:   for (f = 0; f < len; ++f) {
2784:     PetscPrintf(PETSC_COMM_SELF, "  | %g |\n", (double)PetscRealPart(x[f]));
2785:   }
2786:   return(0);
2787: }

2791: PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2792: {
2793:   PetscInt       f, g;

2797:   PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);
2798:   for (f = 0; f < rows; ++f) {
2799:     PetscPrintf(PETSC_COMM_SELF, "  |");
2800:     for (g = 0; g < cols; ++g) {
2801:       PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));
2802:     }
2803:     PetscPrintf(PETSC_COMM_SELF, " |\n");
2804:   }
2805:   return(0);
2806: }

2810: PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X)
2811: {
2812:   PetscMPIInt    rank, numProcs;
2813:   PetscInt       p;

2817:   MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
2818:   MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);
2819:   PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);
2820:   for (p = 0; p < numProcs; ++p) {
2821:     if (p == rank) {
2822:       Vec x;

2824:       VecDuplicate(X, &x);
2825:       VecCopy(X, x);
2826:       VecChop(x, tol);
2827:       VecView(x, PETSC_VIEWER_STDOUT_SELF);
2828:       VecDestroy(&x);
2829:       PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);
2830:     }
2831:     PetscBarrier((PetscObject) dm);
2832:   }
2833:   return(0);
2834: }

2838: /*@
2839:   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

2841:   Input Parameter:
2842: . dm - The DM

2844:   Output Parameter:
2845: . section - The PetscSection

2847:   Level: intermediate

2849:   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

2851: .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2852: @*/
2853: PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
2854: {
2858:   *section = dm->defaultSection;
2859:   return(0);
2860: }

2864: /*@
2865:   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.

2867:   Input Parameters:
2868: + dm - The DM
2869: - section - The PetscSection

2871:   Level: intermediate

2873:   Note: Any existing Section will be destroyed

2875: .seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
2876: @*/
2877: PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
2878: {
2879:   PetscInt       numFields;
2880:   PetscInt       f;

2886:   PetscObjectReference((PetscObject)section);
2887:   PetscSectionDestroy(&dm->defaultSection);
2888:   dm->defaultSection = section;
2889:   PetscSectionGetNumFields(dm->defaultSection, &numFields);
2890:   if (numFields) {
2891:     DMSetNumFields(dm, numFields);
2892:     for (f = 0; f < numFields; ++f) {
2893:       const char *name;

2895:       PetscSectionGetFieldName(dm->defaultSection, f, &name);
2896:       PetscObjectSetName(dm->fields[f], name);
2897:     }
2898:   }
2899:   /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
2900:   PetscSectionDestroy(&dm->defaultGlobalSection);
2901:   return(0);
2902: }
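
/*
   Editorial usage sketch (not part of the PETSc source): building a minimal PetscSection with one
   degree of freedom per point and handing it to the DM with DMSetDefaultSection(). The chart
   [pStart,pEnd) is assumed to cover the DM's points (for a DMPLEX it would come from
   DMPlexGetChart()); the helper name is illustrative and error checking is elided as in this
   listing.
*/
static PetscErrorCode ExampleSetSection(DM dm,PetscInt pStart,PetscInt pEnd)
{
  PetscSection s;
  PetscInt     p;

  PetscSectionCreate(PetscObjectComm((PetscObject)dm),&s);
  PetscSectionSetChart(s,pStart,pEnd);
  for (p = pStart; p < pEnd; ++p) {
    PetscSectionSetDof(s,p,1);
  }
  PetscSectionSetUp(s);
  DMSetDefaultSection(dm,s);     /* the DM takes its own reference */
  PetscSectionDestroy(&s);
  return(0);
}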

2906: /*@
2907:   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

2909:   Collective on DM

2911:   Input Parameter:
2912: . dm - The DM

2914:   Output Parameter:
2915: . section - The PetscSection

2917:   Level: intermediate

2919:   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

2921: .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2922: @*/
2923: PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
2924: {

2930:   if (!dm->defaultGlobalSection) {
2931:     if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
2932:     PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);
2933:     PetscLayoutDestroy(&dm->map);
2934:     PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);
2935:   }
2936:   *section = dm->defaultGlobalSection;
2937:   return(0);
2938: }

2942: /*@
2943:   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.

2945:   Input Parameters:
2946: + dm - The DM
2947: - section - The PetscSection, or NULL

2949:   Level: intermediate

2951:   Note: Any existing Section will be destroyed

2953: .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2954: @*/
2955: PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
2956: {

2962:   PetscObjectReference((PetscObject)section);
2963:   PetscSectionDestroy(&dm->defaultGlobalSection);
2964:   dm->defaultGlobalSection = section;
2965:   return(0);
2966: }

2970: /*@
2971:   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2972:   it is created from the default PetscSection layouts in the DM.

2974:   Input Parameter:
2975: . dm - The DM

2977:   Output Parameter:
2978: . sf - The PetscSF

2980:   Level: intermediate

2982:   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

2984: .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2985: @*/
2986: PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
2987: {
2988:   PetscInt       nroots;

2994:   PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);
2995:   if (nroots < 0) {
2996:     PetscSection section, gSection;

2998:     DMGetDefaultSection(dm, &section);
2999:     if (section) {
3000:       DMGetDefaultGlobalSection(dm, &gSection);
3001:       DMCreateDefaultSF(dm, section, gSection);
3002:     } else {
3003:       *sf = NULL;
3004:       return(0);
3005:     }
3006:   }
3007:   *sf = dm->defaultSF;
3008:   return(0);
3009: }

3013: /*@
3014:   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

3016:   Input Parameters:
3017: + dm - The DM
3018: - sf - The PetscSF

3020:   Level: intermediate

3022:   Note: Any previous SF is destroyed

3024: .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
3025: @*/
3026: PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
3027: {

3033:   PetscSFDestroy(&dm->defaultSF);
3034:   dm->defaultSF = sf;
3035:   return(0);
3036: }

3040: /*@C
3041:   DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
3042:   describing the data layout.

3044:   Input Parameters:
3045: + dm - The DM
3046: . localSection - PetscSection describing the local data layout
3047: - globalSection - PetscSection describing the global data layout

3049:   Level: intermediate

3051: .seealso: DMGetDefaultSF(), DMSetDefaultSF()
3052: @*/
3053: PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
3054: {
3055:   MPI_Comm       comm;
3056:   PetscLayout    layout;
3057:   const PetscInt *ranges;
3058:   PetscInt       *local;
3059:   PetscSFNode    *remote;
3060:   PetscInt       pStart, pEnd, p, nroots, nleaves = 0, l;
3061:   PetscMPIInt    size, rank;

3065:   PetscObjectGetComm((PetscObject)dm,&comm);
3067:   MPI_Comm_size(comm, &size);
3068:   MPI_Comm_rank(comm, &rank);
3069:   PetscSectionGetChart(globalSection, &pStart, &pEnd);
3070:   PetscSectionGetConstrainedStorageSize(globalSection, &nroots);
3071:   PetscLayoutCreate(comm, &layout);
3072:   PetscLayoutSetBlockSize(layout, 1);
3073:   PetscLayoutSetLocalSize(layout, nroots);
3074:   PetscLayoutSetUp(layout);
3075:   PetscLayoutGetRanges(layout, &ranges);
3076:   for (p = pStart; p < pEnd; ++p) {
3077:     PetscInt gdof, gcdof;

3079:     PetscSectionGetDof(globalSection, p, &gdof);
3080:     PetscSectionGetConstraintDof(globalSection, p, &gcdof);
3081:     nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
3082:   }
3083:   PetscMalloc1(nleaves, &local);
3084:   PetscMalloc1(nleaves, &remote);
3085:   for (p = pStart, l = 0; p < pEnd; ++p) {
3086:     const PetscInt *cind;
3087:     PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

3089:     PetscSectionGetDof(localSection, p, &dof);
3090:     PetscSectionGetOffset(localSection, p, &off);
3091:     PetscSectionGetConstraintDof(localSection, p, &cdof);
3092:     PetscSectionGetConstraintIndices(localSection, p, &cind);
3093:     PetscSectionGetDof(globalSection, p, &gdof);
3094:     PetscSectionGetConstraintDof(globalSection, p, &gcdof);
3095:     PetscSectionGetOffset(globalSection, p, &goff);
3096:     if (!gdof) continue; /* Censored point */
3097:     gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
3098:     if (gsize != dof-cdof) {
3099:       if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
3100:       cdof = 0; /* Ignore constraints */
3101:     }
3102:     for (d = 0, c = 0; d < dof; ++d) {
3103:       if ((c < cdof) && (cind[c] == d)) {++c; continue;}
3104:       local[l+d-c] = off+d;
3105:     }
3106:     if (gdof < 0) {
3107:       for (d = 0; d < gsize; ++d, ++l) {
3108:         PetscInt offset = -(goff+1) + d, r;

3110:         PetscFindInt(offset,size+1,ranges,&r);
3111:         if (r < 0) r = -(r+2);
3112:         if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d mapped to invalid process %d (%d, %d)", p, r, gdof, goff);
3113:         remote[l].rank  = r;
3114:         remote[l].index = offset - ranges[r];
3115:       }
3116:     } else {
3117:       for (d = 0; d < gsize; ++d, ++l) {
3118:         remote[l].rank  = rank;
3119:         remote[l].index = goff+d - ranges[rank];
3120:       }
3121:     }
3122:   }
3123:   if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
3124:   PetscLayoutDestroy(&layout);
3125:   PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);
3126:   return(0);
3127: }

3131: /*@
3132:   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.

3134:   Input Parameter:
3135: . dm - The DM

3137:   Output Parameter:
3138: . sf - The PetscSF

3140:   Level: intermediate

3142:   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

3144: .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3145: @*/
3146: PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
3147: {
3151:   *sf = dm->sf;
3152:   return(0);
3153: }

3157: /*@
3158:   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.

3160:   Input Parameters:
3161: + dm - The DM
3162: - sf - The PetscSF

3164:   Level: intermediate

3166: .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3167: @*/
3168: PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3169: {

3175:   PetscSFDestroy(&dm->sf);
3176:   PetscObjectReference((PetscObject) sf);
3177:   dm->sf = sf;
3178:   return(0);
3179: }

3183: PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3184: {
3188:   *numFields = dm->numFields;
3189:   return(0);
3190: }

3194: PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3195: {
3196:   PetscInt       f;

3201:   for (f = 0; f < dm->numFields; ++f) {
3202:     PetscObjectDestroy(&dm->fields[f]);
3203:   }
3204:   PetscFree(dm->fields);
3205:   dm->numFields = numFields;
3206:   PetscMalloc1(dm->numFields, &dm->fields);
3207:   for (f = 0; f < dm->numFields; ++f) {
3208:     PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);
3209:   }
3210:   return(0);
3211: }

3215: PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3216: {
3220:   if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3221:   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3222:   *field = dm->fields[f];
3223:   return(0);
3224: }

3228: PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
3229: {
3230:   DM dm_coord,dmc_coord;
3232:   Vec coords,ccoords;
3233:   VecScatter scat;
3235:   DMGetCoordinateDM(dm,&dm_coord);
3236:   DMGetCoordinateDM(dmc,&dmc_coord);
3237:   DMGetCoordinates(dm,&coords);
3238:   DMGetCoordinates(dmc,&ccoords);
3239:   if (coords && !ccoords) {
3240:     DMCreateGlobalVector(dmc_coord,&ccoords);
3241:     DMCreateInjection(dmc_coord,dm_coord,&scat);
3242:     VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);
3243:     VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);
3244:     DMSetCoordinates(dmc,ccoords);
3245:     VecScatterDestroy(&scat);
3246:     VecDestroy(&ccoords);
3247:   }
3248:   return(0);
3249: }

3253: static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
3254: {
3255:   DM dm_coord,subdm_coord;
3257:   Vec coords,ccoords,clcoords;
3258:   VecScatter *scat_i,*scat_g;
3260:   DMGetCoordinateDM(dm,&dm_coord);
3261:   DMGetCoordinateDM(subdm,&subdm_coord);
3262:   DMGetCoordinates(dm,&coords);
3263:   DMGetCoordinates(subdm,&ccoords);
3264:   if (coords && !ccoords) {
3265:     DMCreateGlobalVector(subdm_coord,&ccoords);
3266:     DMCreateLocalVector(subdm_coord,&clcoords);
3267:     DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);
3268:     VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);
3269:     VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);
3270:     VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);
3271:     VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);
3272:     DMSetCoordinates(subdm,ccoords);
3273:     DMSetCoordinatesLocal(subdm,clcoords);
3274:     VecScatterDestroy(&scat_i[0]);
3275:     VecScatterDestroy(&scat_g[0]);
3276:     VecDestroy(&ccoords);
3277:     VecDestroy(&clcoords);
3278:     PetscFree(scat_i);
3279:     PetscFree(scat_g);
3280:   }
3281:   return(0);
3282: }

3286: /*@
3287:   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

3289:   Collective on DM

3291:   Input Parameters:
3292: + dm - the DM
3293: - c - coordinate vector

3295:   Note:
3296:   The coordinates do not include those for ghost points, which are in the local vector

3298:   Level: intermediate

3300: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3301: .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3302: @*/
3303: PetscErrorCode DMSetCoordinates(DM dm, Vec c)
3304: {

3310:   PetscObjectReference((PetscObject) c);
3311:   VecDestroy(&dm->coordinates);
3312:   dm->coordinates = c;
3313:   VecDestroy(&dm->coordinatesLocal);
3314:   DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);
3315:   DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);
3316:   return(0);
3317: }

3321: /*@
3322:   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

3324:   Collective on DM

3326:    Input Parameters:
3327: +  dm - the DM
3328: -  c - coordinate vector

3330:   Note:
3331:   The coordinates of ghost points can be set using DMSetCoordinates()
3332:   followed by DMGetCoordinatesLocal(). This is intended to enable the
3333:   setting of ghost coordinates outside of the domain.

3335:   Level: intermediate

3337: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3338: .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3339: @*/
3340: PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3341: {

3347:   PetscObjectReference((PetscObject) c);
3348:   VecDestroy(&dm->coordinatesLocal);

3350:   dm->coordinatesLocal = c;

3352:   VecDestroy(&dm->coordinates);
3353:   return(0);
3354: }

3358: /*@
3359:   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

3361:   Not Collective

3363:   Input Parameter:
3364: . dm - the DM

3366:   Output Parameter:
3367: . c - global coordinate vector

3369:   Note:
3370:   This is a borrowed reference, so the user should NOT destroy this vector

3372:   Each process has only the local coordinates (does NOT have the ghost coordinates).

3374:   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3375:   and (x_0,y_0,z_0,x_1,y_1,z_1...)

3377:   Level: intermediate

3379: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3380: .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3381: @*/
3382: PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3383: {

3389:   if (!dm->coordinates && dm->coordinatesLocal) {
3390:     DM cdm = NULL;

3392:     DMGetCoordinateDM(dm, &cdm);
3393:     DMCreateGlobalVector(cdm, &dm->coordinates);
3394:     PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");
3395:     DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);
3396:     DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);
3397:   }
3398:   *c = dm->coordinates;
3399:   return(0);
3400: }

3404: /*@
3405:   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

3407:   Collective on DM

3409:   Input Parameter:
3410: . dm - the DM

3412:   Output Parameter:
3413: . c - coordinate vector

3415:   Note:
3416:   This is a borrowed reference, so the user should NOT destroy this vector

3418:   Each process has the local and ghost coordinates

3420:   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3421:   and (x_0,y_0,z_0,x_1,y_1,z_1...)

3423:   Level: intermediate

3425: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3426: .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3427: @*/
3428: PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3429: {

3435:   if (!dm->coordinatesLocal && dm->coordinates) {
3436:     DM cdm = NULL;

3438:     DMGetCoordinateDM(dm, &cdm);
3439:     DMCreateLocalVector(cdm, &dm->coordinatesLocal);
3440:     PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");
3441:     DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);
3442:     DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);
3443:   }
3444:   *c = dm->coordinatesLocal;
3445:   return(0);
3446: }
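
/*
   Editorial usage sketch (not part of the PETSc source): uniformly stretching a mesh by reading the
   global coordinate vector, scaling a copy, and setting it back so the cached local coordinates are
   rebuilt on the next DMGetCoordinatesLocal(). The helper name is illustrative and error checking
   is elided as in this listing.
*/
static PetscErrorCode ExampleStretchCoordinates(DM dm,PetscReal factor)
{
  Vec coords,newcoords;

  DMGetCoordinates(dm,&coords);  /* borrowed reference; may be NULL if no coordinates are set */
  if (!coords) return(0);
  VecDuplicate(coords,&newcoords);
  VecCopy(coords,newcoords);
  VecScale(newcoords,factor);
  DMSetCoordinates(dm,newcoords);
  VecDestroy(&newcoords);
  return(0);
}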

3450: /*@
3451:   DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates

3453:   Collective on DM

3455:   Input Parameter:
3456: . dm - the DM

3458:   Output Parameter:
3459: . cdm - coordinate DM

3461:   Level: intermediate

3463: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3464: .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3465: @*/
3466: PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3467: {

3473:   if (!dm->coordinateDM) {
3474:     if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3475:     (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);
3476:   }
3477:   *cdm = dm->coordinateDM;
3478:   return(0);
3479: }

3483: /*@
3484:   DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates

3486:   Logically Collective on DM

3488:   Input Parameters:
3489: + dm - the DM
3490: - cdm - coordinate DM

3492:   Level: intermediate

3494: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3495: .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3496: @*/
3497: PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm)
3498: {

3504:   DMDestroy(&dm->coordinateDM);
3505:   dm->coordinateDM = cdm;
3506:   PetscObjectReference((PetscObject) dm->coordinateDM);
3507:   return(0);
3508: }

3512: /*@
3513:   DMGetCoordinateSection - Retrieve the layout of coordinate values over the mesh.

3515:   Not Collective

3517:   Input Parameter:
3518: . dm - The DM object

3520:   Output Parameter:
3521: . section - The PetscSection object

3523:   Level: intermediate

3525: .keywords: mesh, coordinates
3526: .seealso: DMGetCoordinateDM(), DMGetDefaultSection(), DMSetDefaultSection()
3527: @*/
3528: PetscErrorCode DMGetCoordinateSection(DM dm, PetscSection *section)
3529: {
3530:   DM             cdm;

3536:   DMGetCoordinateDM(dm, &cdm);
3537:   DMGetDefaultSection(cdm, section);
3538:   return(0);
3539: }

3543: /*@
3544:   DMSetCoordinateSection - Set the layout of coordinate values over the mesh.

3546:   Not Collective

3548:   Input Parameters:
3549: + dm      - The DM object
3550: - section - The PetscSection object

3552:   Level: intermediate

3554: .keywords: mesh, coordinates
3555: .seealso: DMGetCoordinateSection(), DMGetDefaultSection(), DMSetDefaultSection()
3556: @*/
3557: PetscErrorCode DMSetCoordinateSection(DM dm, PetscSection section)
3558: {
3559:   DM             cdm;

3565:   DMGetCoordinateDM(dm, &cdm);
3566:   DMSetDefaultSection(cdm, section);
3567:   return(0);
3568: }

3572: /*@
3573:   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells

3575:   Not collective

3577:   Input Parameters:
3578: + dm - The DM
3579: - v - The Vec of points

3581:   Output Parameter:
3582: . cells - The local cell numbers for cells which contain the points

3584:   Level: developer

3586: .keywords: point location, mesh
3587: .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3588: @*/
3589: PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3590: {

3597:   if (dm->ops->locatepoints) {
3598:     (*dm->ops->locatepoints)(dm,v,cells);
3599:   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3600:   return(0);
3601: }
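
/*
   Editorial usage sketch (not part of the PETSc source): locating a single point in a
   two-dimensional DM that supports point location. The interlaced (x,y) packing of the point
   vector and the helper name are assumptions made for illustration; error checking is elided as
   in this listing.
*/
static PetscErrorCode ExampleLocateOnePoint(DM dm)
{
  Vec         point;
  IS          cells;
  PetscScalar *coords;

  VecCreateSeq(PETSC_COMM_SELF,2,&point);
  VecGetArray(point,&coords);
  coords[0] = 0.5;               /* x */
  coords[1] = 0.5;               /* y */
  VecRestoreArray(point,&coords);
  DMLocatePoints(dm,point,&cells);
  ISView(cells,PETSC_VIEWER_STDOUT_SELF);
  ISDestroy(&cells);
  VecDestroy(&point);
  return(0);
}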