Actual source code: da.c
1: #include <petsc/private/dmdaimpl.h>
3: /*@
4: DMDASetSizes - Sets the number of grid points in the three coordinate directions
6: Logically Collective on da
8: Input Parameters:
9: + da - the `DMDA`
10: . M - the global X size
11: . N - the global Y size
12: - P - the global Z size
14: Level: intermediate
16: Developer Note:
17: Since the dimension may not yet have been set, the code cannot check that the numbers of grid points in the Y and Z directions are positive
19: .seealso: `DM`, `DMDA`, `PetscSplitOwnership()`
20: @*/
21: PetscErrorCode DMDASetSizes(DM da, PetscInt M, PetscInt N, PetscInt P)
22: {
23: DM_DA *dd = (DM_DA *)da->data;
25: PetscFunctionBegin;
30: PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
31: PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in X direction must be nonnegative");
32: PetscCheck(N >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Y direction must be nonnegative");
33: PetscCheck(P >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Number of grid points in Z direction must be nonnegative");
35: dd->M = M;
36: dd->N = N;
37: dd->P = P;
38: PetscFunctionReturn(PETSC_SUCCESS);
39: }
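/* Editor's usage sketch (not part of da.c): the typical call sequence in which
   DMDASetSizes() participates when a DMDA is configured by hand instead of via
   DMDACreate2d(). The communicator, sizes, dof, and stencil are illustrative
   assumptions. */
static PetscErrorCode ExampleConfigureDA(MPI_Comm comm, DM *dm)
{
  PetscFunctionBegin;
  PetscCall(DMDACreate(comm, dm));
  PetscCall(DMSetDimension(*dm, 2));
  PetscCall(DMDASetSizes(*dm, 64, 64, 1)); /* 64 x 64 global grid; P is unused in 2d */
  PetscCall(DMDASetNumProcs(*dm, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE));
  PetscCall(DMDASetBoundaryType(*dm, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE));
  PetscCall(DMDASetDof(*dm, 1));
  PetscCall(DMDASetStencilType(*dm, DMDA_STENCIL_STAR));
  PetscCall(DMDASetStencilWidth(*dm, 1));
  PetscCall(DMSetUp(*dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}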
41: /*@
42: DMDASetNumProcs - Sets the number of processes in each dimension
44: Logically Collective on da
46: Input Parameters:
47: + da - the `DMDA`
48: . m - the number of X procs (or `PETSC_DECIDE`)
49: . n - the number of Y procs (or `PETSC_DECIDE`)
50: - p - the number of Z procs (or `PETSC_DECIDE`)
52: Level: intermediate
54: .seealso: `DM`, `DMDA`, `DMDASetSizes()`, `PetscSplitOwnership()`
55: @*/
56: PetscErrorCode DMDASetNumProcs(DM da, PetscInt m, PetscInt n, PetscInt p)
57: {
58: DM_DA *dd = (DM_DA *)da->data;
60: PetscFunctionBegin;
65: PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
66: dd->m = m;
67: dd->n = n;
68: dd->p = p;
69: if (da->dim == 2) {
70: PetscMPIInt size;
71: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)da), &size));
72: if ((dd->m > 0) && (dd->n < 0)) {
73: dd->n = size / dd->m;
74: PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in X direction do not evenly divide the communicator size %d", m, size);
75: }
76: if ((dd->n > 0) && (dd->m < 0)) {
77: dd->m = size / dd->n;
78: PetscCheck(dd->n * dd->m == size, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "%" PetscInt_FMT " processes in Y direction do not evenly divide the communicator size %d", n, size);
79: }
80: }
81: PetscFunctionReturn(PETSC_SUCCESS);
82: }
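/* Editor's sketch (not part of da.c): fixing the process count in x and letting
   DMDASetNumProcs() derive the count in y via the 2d branch above. The rank
   count of 8 is an assumption; on 8 ranks this yields a 4 x 2 process grid,
   and the PetscCheck above fires if m does not divide the communicator size. */
static PetscErrorCode ExampleProcessGrid(DM dm)
{
  PetscFunctionBegin;
  PetscCall(DMDASetNumProcs(dm, 4, PETSC_DECIDE, PETSC_DECIDE));
  PetscFunctionReturn(PETSC_SUCCESS);
}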
84: /*@
85: DMDASetBoundaryType - Sets the type of ghost nodes on domain boundaries.
87: Not collective
89: Input Parameters:
90: + da - The `DMDA`
91: - bx, by, bz - the boundary types in each direction, each one of `DM_BOUNDARY_NONE`, `DM_BOUNDARY_GHOSTED`, or `DM_BOUNDARY_PERIODIC`
93: Level: intermediate
95: .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`, `DMBoundaryType`
96: @*/
97: PetscErrorCode DMDASetBoundaryType(DM da, DMBoundaryType bx, DMBoundaryType by, DMBoundaryType bz)
98: {
99: DM_DA *dd = (DM_DA *)da->data;
101: PetscFunctionBegin;
106: PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
107: dd->bx = bx;
108: dd->by = by;
109: dd->bz = bz;
110: PetscFunctionReturn(PETSC_SUCCESS);
111: }
113: /*@
114: DMDASetDof - Sets the number of degrees of freedom per vertex
116: Not collective
118: Input Parameters:
119: + da - The `DMDA`
120: - dof - Number of degrees of freedom
122: Level: intermediate
124: .seealso: `DM`, `DMDA`, `DMDAGetDof()`, `DMDACreate()`, `DMDestroy()`
125: @*/
126: PetscErrorCode DMDASetDof(DM da, PetscInt dof)
127: {
128: DM_DA *dd = (DM_DA *)da->data;
130: PetscFunctionBegin;
133: PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
134: dd->w = dof;
135: da->bs = dof;
136: PetscFunctionReturn(PETSC_SUCCESS);
137: }
139: /*@
140: DMDAGetDof - Gets the number of degrees of freedom per vertex
142: Not collective
144: Input Parameter:
145: . da - The `DMDA`
147: Output Parameter:
148: . dof - Number of degrees of freedom
150: Level: intermediate
152: .seealso: `DM`, `DMDA`, `DMDASetDof()`, `DMDACreate()`, `DMDestroy()`
153: @*/
154: PetscErrorCode DMDAGetDof(DM da, PetscInt *dof)
155: {
156: DM_DA *dd = (DM_DA *)da->data;
158: PetscFunctionBegin;
161: *dof = dd->w;
162: PetscFunctionReturn(PETSC_SUCCESS);
163: }
165: /*@
166: DMDAGetOverlap - Gets the size of the per-processor overlap.
168: Not collective
170: Input Parameter:
171: . da - The `DMDA`
173: Output Parameters:
174: + x - Overlap in the x direction
175: . y - Overlap in the y direction
176: - z - Overlap in the z direction
178: Level: intermediate
180: .seealso: `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDASetOverlap()`
181: @*/
182: PetscErrorCode DMDAGetOverlap(DM da, PetscInt *x, PetscInt *y, PetscInt *z)
183: {
184: DM_DA *dd = (DM_DA *)da->data;
186: PetscFunctionBegin;
188: if (x) *x = dd->xol;
189: if (y) *y = dd->yol;
190: if (z) *z = dd->zol;
191: PetscFunctionReturn(PETSC_SUCCESS);
192: }
194: /*@
195: DMDASetOverlap - Sets the size of the per-processor overlap.
197: Not collective
199: Input Parameters:
200: + da - The `DMDA`
201: . x - Overlap in the x direction
202: . y - Overlap in the y direction
203: - z - Overlap in the z direction
205: Level: intermediate
207: .seealso: `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDAGetOverlap()`
208: @*/
209: PetscErrorCode DMDASetOverlap(DM da, PetscInt x, PetscInt y, PetscInt z)
210: {
211: DM_DA *dd = (DM_DA *)da->data;
213: PetscFunctionBegin;
218: dd->xol = x;
219: dd->yol = y;
220: dd->zol = z;
221: PetscFunctionReturn(PETSC_SUCCESS);
222: }
224: /*@
225: DMDAGetNumLocalSubDomains - Gets the number of local subdomains created upon decomposition.
227: Not collective
229: Input Parameter:
230: . da - The `DMDA`
232: Output Parameter:
233: . Nsub - Number of local subdomains created upon decomposition
235: Level: intermediate
237: .seealso: `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDASetNumLocalSubDomains()`
238: @*/
239: PetscErrorCode DMDAGetNumLocalSubDomains(DM da, PetscInt *Nsub)
240: {
241: DM_DA *dd = (DM_DA *)da->data;
243: PetscFunctionBegin;
245: if (Nsub) *Nsub = dd->Nsub;
246: PetscFunctionReturn(PETSC_SUCCESS);
247: }
249: /*@
250: DMDASetNumLocalSubDomains - Sets the number of local subdomains created upon decomposition.
252: Not collective
254: Input Parameters:
255: + da - The `DMDA`
256: - Nsub - The number of local subdomains requested
258: Level: intermediate
260: .seealso: `DM`, `DMDA`, `DMCreateDomainDecomposition()`, `DMDAGetNumLocalSubDomains()`
261: @*/
262: PetscErrorCode DMDASetNumLocalSubDomains(DM da, PetscInt Nsub)
263: {
264: DM_DA *dd = (DM_DA *)da->data;
266: PetscFunctionBegin;
269: dd->Nsub = Nsub;
270: PetscFunctionReturn(PETSC_SUCCESS);
271: }
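/* Editor's sketch (assumption, not part of da.c): combining DMDASetOverlap()
   and DMDASetNumLocalSubDomains() before asking for a decomposition. The
   overlap of 1 and the two requested local subdomains are illustrative. */
static PetscErrorCode ExampleDecompose(DM dm)
{
  PetscInt n;
  DM      *subdm;

  PetscFunctionBegin;
  PetscCall(DMDASetOverlap(dm, 1, 1, 1));
  PetscCall(DMDASetNumLocalSubDomains(dm, 2));
  PetscCall(DMCreateDomainDecomposition(dm, &n, NULL, NULL, NULL, &subdm));
  /* ... use the n subdomain DMs ... */
  for (PetscInt i = 0; i < n; i++) PetscCall(DMDestroy(&subdm[i]));
  PetscCall(PetscFree(subdm));
  PetscFunctionReturn(PETSC_SUCCESS);
}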
273: /*@
274: DMDASetOffset - Sets the index offset of the DA.
276: Collective on da
278: Input Parameters:
279: + da - The `DMDA`
280: . xo - The offset in the x direction
281: . yo - The offset in the y direction
282: . zo - The offset in the z direction
283: - Mo, No, Po - The global sizes in the x, y, and z directions
284: Level: intermediate
286: Note:
287: This is used primarily to overlap a computation on a local `DMDA` with that on a global `DMDA` without
288: changing boundary conditions or subdomain features that depend upon the global offsets.
290: .seealso: `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
291: @*/
292: PetscErrorCode DMDASetOffset(DM da, PetscInt xo, PetscInt yo, PetscInt zo, PetscInt Mo, PetscInt No, PetscInt Po)
293: {
294: DM_DA *dd = (DM_DA *)da->data;
296: PetscFunctionBegin;
304: dd->xo = xo;
305: dd->yo = yo;
306: dd->zo = zo;
307: dd->Mo = Mo;
308: dd->No = No;
309: dd->Po = Po;
311: if (da->coordinates[0].dm) PetscCall(DMDASetOffset(da->coordinates[0].dm, xo, yo, zo, Mo, No, Po));
312: PetscFunctionReturn(PETSC_SUCCESS);
313: }
315: /*@
316: DMDAGetOffset - Gets the index offset of the `DMDA`.
318: Not collective
320: Input Parameter:
321: . da - The `DMDA`
323: Output Parameters:
324: + xo - The offset in the x direction
325: . yo - The offset in the y direction
326: . zo - The offset in the z direction
327: . Mo - The global size in the x direction
328: . No - The global size in the y direction
329: - Po - The global size in the z direction
331: Level: intermediate
333: .seealso: `DM`, `DMDA`, `DMDASetOffset()`, `DMDAVecGetArray()`
334: @*/
335: PetscErrorCode DMDAGetOffset(DM da, PetscInt *xo, PetscInt *yo, PetscInt *zo, PetscInt *Mo, PetscInt *No, PetscInt *Po)
336: {
337: DM_DA *dd = (DM_DA *)da->data;
339: PetscFunctionBegin;
341: if (xo) *xo = dd->xo;
342: if (yo) *yo = dd->yo;
343: if (zo) *zo = dd->zo;
344: if (Mo) *Mo = dd->Mo;
345: if (No) *No = dd->No;
346: if (Po) *Po = dd->Po;
347: PetscFunctionReturn(PETSC_SUCCESS);
348: }
350: /*@
351: DMDAGetNonOverlappingRegion - Gets the indices of the nonoverlapping region of a subdomain `DM`.
353: Not collective
355: Input Parameter:
356: . da - The `DMDA`
358: Output Parameters:
359: + xs - The start of the region in x
360: . ys - The start of the region in y
361: . zs - The start of the region in z
362: . xm - The size of the region in x
363: . ym - The size of the region in y
364: - zm - The size of the region in z
366: Level: intermediate
368: .seealso: `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
369: @*/
370: PetscErrorCode DMDAGetNonOverlappingRegion(DM da, PetscInt *xs, PetscInt *ys, PetscInt *zs, PetscInt *xm, PetscInt *ym, PetscInt *zm)
371: {
372: DM_DA *dd = (DM_DA *)da->data;
374: PetscFunctionBegin;
376: if (xs) *xs = dd->nonxs;
377: if (ys) *ys = dd->nonys;
378: if (zs) *zs = dd->nonzs;
379: if (xm) *xm = dd->nonxm;
380: if (ym) *ym = dd->nonym;
381: if (zm) *zm = dd->nonzm;
382: PetscFunctionReturn(PETSC_SUCCESS);
383: }
385: /*@
386: DMDASetNonOverlappingRegion - Sets the indices of the nonoverlapping region of a subdomain `DM`.
388: Collective on da
390: Input Parameters:
391: + da - The `DMDA`
392: . xs - The start of the region in x
393: . ys - The start of the region in y
394: . zs - The start of the region in z
395: . xm - The size of the region in x
396: . ym - The size of the region in y
397: - zm - The size of the region in z
399: Level: intermediate
401: .seealso: `DM`, `DMDA`, `DMDAGetOffset()`, `DMDAVecGetArray()`
402: @*/
403: PetscErrorCode DMDASetNonOverlappingRegion(DM da, PetscInt xs, PetscInt ys, PetscInt zs, PetscInt xm, PetscInt ym, PetscInt zm)
404: {
405: DM_DA *dd = (DM_DA *)da->data;
407: PetscFunctionBegin;
415: dd->nonxs = xs;
416: dd->nonys = ys;
417: dd->nonzs = zs;
418: dd->nonxm = xm;
419: dd->nonym = ym;
420: dd->nonzm = zm;
422: PetscFunctionReturn(PETSC_SUCCESS);
423: }
425: /*@
426: DMDASetStencilType - Sets the type of the communication stencil
428: Logically Collective on da
430: Input Parameters:
431: + da - The `DMDA`
432: - stype - The stencil type, use either `DMDA_STENCIL_BOX` or `DMDA_STENCIL_STAR`.
434: Level: intermediate
436: .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
437: @*/
438: PetscErrorCode DMDASetStencilType(DM da, DMDAStencilType stype)
439: {
440: DM_DA *dd = (DM_DA *)da->data;
442: PetscFunctionBegin;
445: PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
446: dd->stencil_type = stype;
447: PetscFunctionReturn(PETSC_SUCCESS);
448: }
450: /*@
451: DMDAGetStencilType - Gets the type of the communication stencil
453: Not collective
455: Input Parameter:
456: . da - The `DMDA`
458: Output Parameter:
459: . stype - The stencil type, use either `DMDA_STENCIL_BOX` or `DMDA_STENCIL_STAR`.
461: Level: intermediate
463: .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
464: @*/
465: PetscErrorCode DMDAGetStencilType(DM da, DMDAStencilType *stype)
466: {
467: DM_DA *dd = (DM_DA *)da->data;
469: PetscFunctionBegin;
472: *stype = dd->stencil_type;
473: PetscFunctionReturn(PETSC_SUCCESS);
474: }
476: /*@
477: DMDASetStencilWidth - Sets the width of the communication stencil
479: Logically Collective on da
481: Input Parameters:
482: + da - The `DMDA`
483: - width - The stencil width
485: Level: intermediate
487: .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
488: @*/
489: PetscErrorCode DMDASetStencilWidth(DM da, PetscInt width)
490: {
491: DM_DA *dd = (DM_DA *)da->data;
493: PetscFunctionBegin;
496: PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
497: dd->s = width;
498: PetscFunctionReturn(PETSC_SUCCESS);
499: }
501: /*@
502: DMDAGetStencilWidth - Gets the width of the communication stencil
504: Not collective
506: Input Parameter:
507: . da - The `DMDA`
509: Output Parameter:
510: . width - The stencil width
512: Level: intermediate
514: .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
515: @*/
516: PetscErrorCode DMDAGetStencilWidth(DM da, PetscInt *width)
517: {
518: DM_DA *dd = (DM_DA *)da->data;
520: PetscFunctionBegin;
523: *width = dd->s;
524: PetscFunctionReturn(PETSC_SUCCESS);
525: }
527: static PetscErrorCode DMDACheckOwnershipRanges_Private(DM da, PetscInt M, PetscInt m, const PetscInt lx[])
528: {
529: PetscInt i, sum;
531: PetscFunctionBegin;
532: PetscCheck(M >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Global dimension not set");
533: for (i = sum = 0; i < m; i++) sum += lx[i];
534: PetscCheck(sum == M, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_INCOMP, "Ownership ranges sum to %" PetscInt_FMT " but global dimension is %" PetscInt_FMT, sum, M);
535: PetscFunctionReturn(PETSC_SUCCESS);
536: }
538: /*@
539: DMDASetOwnershipRanges - Sets the number of nodes in each direction on each process
541: Logically Collective on da
543: Input Parameters:
544: + da - The `DMDA`
545: . lx - array containing number of nodes in the X direction on each process, or NULL. If non-null, must be of length da->m
546: . ly - array containing number of nodes in the Y direction on each process, or NULL. If non-null, must be of length da->n
547: - lz - array containing number of nodes in the Z direction on each process, or NULL. If non-null, must be of length da->p.
549: Level: intermediate
551: Note:
552: These numbers are NOT multiplied by the number of dof per node.
553: .seealso: `DM`, `DMDA`, `DMDACreate()`, `DMDestroy()`
554: @*/
555: PetscErrorCode DMDASetOwnershipRanges(DM da, const PetscInt lx[], const PetscInt ly[], const PetscInt lz[])
556: {
557: DM_DA *dd = (DM_DA *)da->data;
559: PetscFunctionBegin;
561: PetscCheck(!da->setupcalled, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "This function must be called before DMSetUp()");
562: if (lx) {
563: PetscCheck(dd->m >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of procs");
564: PetscCall(DMDACheckOwnershipRanges_Private(da, dd->M, dd->m, lx));
565: if (!dd->lx) PetscCall(PetscMalloc1(dd->m, &dd->lx));
566: PetscCall(PetscArraycpy(dd->lx, lx, dd->m));
567: }
568: if (ly) {
569: PetscCheck(dd->n >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of procs");
570: PetscCall(DMDACheckOwnershipRanges_Private(da, dd->N, dd->n, ly));
571: if (!dd->ly) PetscCall(PetscMalloc1(dd->n, &dd->ly));
572: PetscCall(PetscArraycpy(dd->ly, ly, dd->n));
573: }
574: if (lz) {
575: PetscCheck(dd->p >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONGSTATE, "Cannot set ownership ranges before setting number of procs");
576: PetscCall(DMDACheckOwnershipRanges_Private(da, dd->P, dd->p, lz));
577: if (!dd->lz) PetscCall(PetscMalloc1(dd->p, &dd->lz));
578: PetscCall(PetscArraycpy(dd->lz, lz, dd->p));
579: }
580: PetscFunctionReturn(PETSC_SUCCESS);
581: }
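/* Editor's sketch (not part of da.c): prescribing an uneven split along x for
   a 1d DMDA with M = 10 on exactly 4 ranks (both assumptions). The entries of
   lx must sum to M, which DMDACheckOwnershipRanges_Private() verifies above. */
static PetscErrorCode ExampleOwnershipRanges(DM dm)
{
  const PetscInt lx[4] = {3, 3, 2, 2}; /* 3 + 3 + 2 + 2 == 10 == M */

  PetscFunctionBegin;
  PetscCall(DMDASetNumProcs(dm, 4, PETSC_DECIDE, PETSC_DECIDE));
  PetscCall(DMDASetOwnershipRanges(dm, lx, NULL, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}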
583: /*@
584: DMDASetInterpolationType - Sets the type of interpolation that will be
585: returned by `DMCreateInterpolation()`
587: Logically Collective on da
589: Input Parameters:
590: + da - initial distributed array
591: - ctype - `DMDA_Q1` and `DMDA_Q0` are currently the only supported forms
593: Level: intermediate
595: Note:
596: You should call this on the coarser of the two `DMDA` you pass to `DMCreateInterpolation()`
598: .seealso: `DM`, `DMDA`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `DMDestroy()`, `DMDAInterpolationType`
599: @*/
600: PetscErrorCode DMDASetInterpolationType(DM da, DMDAInterpolationType ctype)
601: {
602: DM_DA *dd = (DM_DA *)da->data;
604: PetscFunctionBegin;
607: dd->interptype = ctype;
608: PetscFunctionReturn(PETSC_SUCCESS);
609: }
611: /*@
612: DMDAGetInterpolationType - Gets the type of interpolation that will be
613: used by `DMCreateInterpolation()`
615: Not Collective
617: Input Parameter:
618: . da - distributed array
620: Output Parameter:
621: . ctype - interpolation type (`DMDA_Q1` and `DMDA_Q0` are currently the only supported forms)
623: Level: intermediate
625: .seealso: `DM`, `DMDA`, `DMDAInterpolationType`, `DMDASetInterpolationType()`, `DMCreateInterpolation()`
626: @*/
627: PetscErrorCode DMDAGetInterpolationType(DM da, DMDAInterpolationType *ctype)
628: {
629: DM_DA *dd = (DM_DA *)da->data;
631: PetscFunctionBegin;
634: *ctype = dd->interptype;
635: PetscFunctionReturn(PETSC_SUCCESS);
636: }
638: /*@C
639: DMDAGetNeighbors - Gets an array containing the MPI ranks of all the current
640: process's neighbors.
642: Not Collective
644: Input Parameter:
645: . da - the `DMDA` object
647: Output Parameters:
648: . ranks - the neighbors' ranks, stored with the x index increasing most rapidly;
649: this process itself is included in the list
651: Level: intermediate
653: Notes:
654: In 2d the array is of length 9, in 3d of length 27
656: Not supported in 1d
658: Do not free the array, it is freed when the `DMDA` is destroyed.
660: Fortran Note:
661: In Fortran you must pass in an array of the appropriate length.
663: .seealso: `DMDA`, `DM`
664: @*/
665: PetscErrorCode DMDAGetNeighbors(DM da, const PetscMPIInt *ranks[])
666: {
667: DM_DA *dd = (DM_DA *)da->data;
669: PetscFunctionBegin;
671: *ranks = dd->neighbors;
672: PetscFunctionReturn(PETSC_SUCCESS);
673: }
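/* Editor's sketch (not part of da.c): printing the 3 x 3 neighbor grid of a
   2d DMDA, x index increasing most rapidly; entry 4 is this process itself. */
static PetscErrorCode ExampleNeighbors2d(DM dm)
{
  const PetscMPIInt *ranks;

  PetscFunctionBegin;
  PetscCall(DMDAGetNeighbors(dm, &ranks));
  for (PetscInt i = 0; i < 9; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "neighbor %" PetscInt_FMT ": %d\n", i, (int)ranks[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}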
675: /*@C
676: DMDAGetOwnershipRanges - Gets the ranges of indices in the x, y and z direction that are owned by each process
678: Not Collective
680: Input Parameter:
681: . da - the `DMDA` object
683: Output Parameters:
684: + lx - ownership along x direction (optional)
685: . ly - ownership along y direction (optional)
686: - lz - ownership along z direction (optional)
688: Level: intermediate
690: Note:
691: These correspond to the optional final arguments passed to `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`
693: In C you should not free these arrays, nor change the values in them. They will only have valid values while the
694: `DMDA` they came from still exists (has not been destroyed).
696: These numbers are NOT multiplied by the number of dof per node.
698: Fortran Note:
699: In Fortran one must pass in arrays lx, ly, and lz that are long enough to hold the values; the sixth, seventh and
700: eighth arguments from `DMDAGetInfo()`
702: .seealso: `DM`, `DMDA`, `DMDAGetCorners()`, `DMDAGetGhostCorners()`, `DMDACreate()`, `DMDACreate1d()`, `DMDACreate2d()`, `DMDACreate3d()`, `VecGetOwnershipRanges()`
703: @*/
704: PetscErrorCode DMDAGetOwnershipRanges(DM da, const PetscInt *lx[], const PetscInt *ly[], const PetscInt *lz[])
705: {
706: DM_DA *dd = (DM_DA *)da->data;
708: PetscFunctionBegin;
710: if (lx) *lx = dd->lx;
711: if (ly) *ly = dd->ly;
712: if (lz) *lz = dd->lz;
713: PetscFunctionReturn(PETSC_SUCCESS);
714: }
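/* Editor's sketch (not part of da.c): recovering each process column's share
   of grid points along x. The arrays belong to the DMDA; do not free or
   modify them. */
static PetscErrorCode ExamplePrintRangesX(DM dm)
{
  const PetscInt *lx;
  PetscInt        m;

  PetscFunctionBegin;
  PetscCall(DMDAGetInfo(dm, NULL, NULL, NULL, NULL, &m, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetOwnershipRanges(dm, &lx, NULL, NULL));
  for (PetscInt i = 0; i < m; i++) PetscCall(PetscPrintf(PETSC_COMM_SELF, "process column %" PetscInt_FMT " owns %" PetscInt_FMT " points in x\n", i, lx[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}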
716: /*@
717: DMDASetRefinementFactor - Sets the ratios by which the `DMDA` grid is refined
719: Logically Collective on da
721: Input Parameters:
722: + da - the `DMDA` object
723: . refine_x - ratio of fine grid to coarse in x direction (2 by default)
724: . refine_y - ratio of fine grid to coarse in y direction (2 by default)
725: - refine_z - ratio of fine grid to coarse in z direction (2 by default)
727: Options Database Keys:
728: + -da_refine_x refine_x - refinement ratio in x direction
729: . -da_refine_y refine_y - refinement ratio in y direction
730: . -da_refine_z refine_z - refinement ratio in z direction
731: - -da_refine <n> - refine the DMDA object n times when it is created.
733: Level: intermediate
735: Note:
736: Pass `PETSC_IGNORE` to leave a value unchanged
738: .seealso: `DM`, `DMDA`, `DMRefine()`, `DMDAGetRefinementFactor()`
739: @*/
740: PetscErrorCode DMDASetRefinementFactor(DM da, PetscInt refine_x, PetscInt refine_y, PetscInt refine_z)
741: {
742: DM_DA *dd = (DM_DA *)da->data;
744: PetscFunctionBegin;
750: if (refine_x > 0) dd->refine_x = refine_x;
751: if (refine_y > 0) dd->refine_y = refine_y;
752: if (refine_z > 0) dd->refine_z = refine_z;
753: PetscFunctionReturn(PETSC_SUCCESS);
754: }
756: /*@C
757: DMDAGetRefinementFactor - Gets the ratios by which the `DMDA` grid is refined
759: Not Collective
761: Input Parameter:
762: . da - the `DMDA` object
764: Output Parameters:
765: + refine_x - ratio of fine grid to coarse in x direction (2 by default)
766: . refine_y - ratio of fine grid to coarse in y direction (2 by default)
767: - refine_z - ratio of fine grid to coarse in z direction (2 by default)
769: Level: intermediate
771: Note:
772: Pass NULL for values you do not need
774: .seealso: `DM`, `DMDA`, `DMRefine()`, `DMDASetRefinementFactor()`
775: @*/
776: PetscErrorCode DMDAGetRefinementFactor(DM da, PetscInt *refine_x, PetscInt *refine_y, PetscInt *refine_z)
777: {
778: DM_DA *dd = (DM_DA *)da->data;
780: PetscFunctionBegin;
782: if (refine_x) *refine_x = dd->refine_x;
783: if (refine_y) *refine_y = dd->refine_y;
784: if (refine_z) *refine_z = dd->refine_z;
785: PetscFunctionReturn(PETSC_SUCCESS);
786: }
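/* Editor's sketch (not part of da.c): anisotropic refinement. The factors 4
   (in x) and 2 (in y) are illustrative; any non-positive value leaves a factor
   unchanged, per the guards in DMDASetRefinementFactor() above. */
static PetscErrorCode ExampleRefineAnisotropic(DM dm, DM *fine)
{
  PetscFunctionBegin;
  PetscCall(DMDASetRefinementFactor(dm, 4, 2, -1)); /* leave the z factor unchanged */
  PetscCall(DMRefine(dm, PetscObjectComm((PetscObject)dm), fine));
  PetscFunctionReturn(PETSC_SUCCESS);
}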
788: /*@C
789: DMDASetGetMatrix - Sets the routine used by the `DMDA` to allocate a matrix.
791: Logically Collective on da
793: Input Parameters:
794: + da - the `DMDA` object
795: - f - the function that allocates the matrix for that specific `DMDA`
797: Level: developer
799: Note:
800: See `DMDASetBlockFills()` that provides a simple way to provide the nonzero structure for
801: the diagonal and off-diagonal blocks of the matrix
803: Fortran Note:
804: Not supported from Fortran
806: .seealso: `DM`, `DMDA`, `DMCreateMatrix()`, `DMDASetBlockFills()`
807: @*/
808: PetscErrorCode DMDASetGetMatrix(DM da, PetscErrorCode (*f)(DM, Mat *))
809: {
810: PetscFunctionBegin;
812: da->ops->creatematrix = f;
813: PetscFunctionReturn(PETSC_SUCCESS);
814: }
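/* Editor's sketch (assumption, not part of da.c): a custom allocation routine
   of the form accepted by DMDASetGetMatrix(). It sizes the matrix from the
   DMDA's local corners and dof; a real routine would also preallocate the
   nonzero structure. */
static PetscErrorCode MyDACreateMatrix(DM dm, Mat *J)
{
  PetscInt xm, ym, zm, dof, nlocal;

  PetscFunctionBegin;
  PetscCall(DMDAGetCorners(dm, NULL, NULL, NULL, &xm, &ym, &zm));
  PetscCall(DMDAGetDof(dm, &dof));
  nlocal = xm * ym * zm * dof;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), J));
  PetscCall(MatSetSizes(*J, nlocal, nlocal, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetFromOptions(*J));
  PetscCall(MatSetUp(*J));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/* Installed with: PetscCall(DMDASetGetMatrix(dm, MyDACreateMatrix)); */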
816: /*@
817: DMDAMapMatStencilToGlobal - Map a list of `MatStencil` on a grid to global indices.
819: Not Collective
821: Input Parameters:
822: + da - the `DMDA` object
823: . m - number of MatStencils
824: - idxm - grid points (and component number when dof > 1)
826: Output Parameter:
827: . gidxm - global row indices
829: Level: intermediate
831: .seealso: `DM`, `DMDA`, `MatStencil`
832: @*/
833: PetscErrorCode DMDAMapMatStencilToGlobal(DM da, PetscInt m, const MatStencil idxm[], PetscInt gidxm[])
834: {
835: const DM_DA *dd = (const DM_DA *)da->data;
836: const PetscInt *dxm = (const PetscInt *)idxm;
837: PetscInt i, j, sdim, tmp, dim;
838: PetscInt dims[4], starts[4], dims2[3], starts2[3], dof = dd->w;
839: ISLocalToGlobalMapping ltog;
841: PetscFunctionBegin;
842: if (m <= 0) PetscFunctionReturn(PETSC_SUCCESS);
844: /* Code adapted from DMDAGetGhostCorners() */
845: starts2[0] = dd->Xs / dof + dd->xo;
846: starts2[1] = dd->Ys + dd->yo;
847: starts2[2] = dd->Zs + dd->zo;
848: dims2[0] = (dd->Xe - dd->Xs) / dof;
849: dims2[1] = (dd->Ye - dd->Ys);
850: dims2[2] = (dd->Ze - dd->Zs);
852: /* As if we do MatSetStencil() to get dims[]/starts[] of mat->stencil */
853: dim = da->dim; /* DA dim: 1 to 3 */
854: sdim = dim + (dof > 1 ? 1 : 0); /* Dimensions in MatStencil's (k,j,i,c) view */
855: for (i = 0; i < dim; i++) { /* Reverse the order and also skip the unused dimensions */
856: dims[i] = dims2[dim - i - 1]; /* ex. dims/starts[] are in order of {i} for 1D, {j,i} for 2D and {k,j,i} for 3D */
857: starts[i] = starts2[dim - i - 1];
858: }
859: starts[dim] = 0; /* Append the extra dim for dof (won't be used below if dof=1) */
860: dims[dim] = dof;
862: /* Map stencils to local indices (code adapted from MatSetValuesStencil()) */
863: for (i = 0; i < m; i++) {
864: dxm += 3 - dim; /* Input is {k,j,i,c}; move the pointer to the first used index, e.g., j in 2D */
865: tmp = 0;
866: for (j = 0; j < sdim; j++) { /* Iter over, ex. j,i or j,i,c in 2D */
867: if (tmp < 0 || dxm[j] < starts[j] || dxm[j] >= (starts[j] + dims[j])) tmp = -1; /* Beyond the ghost region, therefore ignored with negative indices */
868: else tmp = tmp * dims[j] + (dxm[j] - starts[j]);
869: }
870: gidxm[i] = tmp;
871: /* Move to the next MatStencil point */
872: if (dof > 1) dxm += sdim; /* c is already counted in sdim */
873: else dxm += sdim + 1; /* skip the unused c */
874: }
876: /* Map local indices to global indices */
877: PetscCall(DMGetLocalToGlobalMapping(da, <og));
878: PetscCall(ISLocalToGlobalMappingApply(ltog, m, gidxm, gidxm));
879: PetscFunctionReturn(PETSC_SUCCESS);
880: }
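/* Editor's sketch (not part of da.c): mapping one 2d grid point to a global
   row index and inserting a diagonal entry. The matrix A is assumed to come
   from DMCreateMatrix() on the same DMDA; out-of-range points map to negative
   indices, which MatSetValues() ignores. */
static PetscErrorCode ExampleStencilInsert(DM dm, Mat A, PetscInt i, PetscInt j, PetscScalar v)
{
  MatStencil idx;
  PetscInt   gidx;

  PetscFunctionBegin;
  idx.i = i; idx.j = j; idx.k = 0; idx.c = 0; /* grid point (j,i), component 0 */
  PetscCall(DMDAMapMatStencilToGlobal(dm, 1, &idx, &gidx));
  PetscCall(MatSetValues(A, 1, &gidx, 1, &gidx, &v, INSERT_VALUES));
  PetscFunctionReturn(PETSC_SUCCESS);
}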
882: /*
883: Creates "balanced" ownership ranges after refinement, constrained by the need for the
884: fine grid boundaries to fall within one stencil width of the coarse partition.
886: Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
887: */
888: static PetscErrorCode DMDARefineOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lc[], PetscInt lf[])
889: {
890: PetscInt i, totalc = 0, remaining, startc = 0, startf = 0;
892: PetscFunctionBegin;
893: PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio);
894: if (ratio == 1) {
895: PetscCall(PetscArraycpy(lf, lc, m));
896: PetscFunctionReturn(PETSC_SUCCESS);
897: }
898: for (i = 0; i < m; i++) totalc += lc[i];
899: remaining = (!periodic) + ratio * (totalc - (!periodic));
900: for (i = 0; i < m; i++) {
901: PetscInt want = remaining / (m - i) + !!(remaining % (m - i));
902: if (i == m - 1) lf[i] = want;
903: else {
904: const PetscInt nextc = startc + lc[i];
905: /* Move the first fine node of the next subdomain to the right until the coarse node on its left is within one
906: * coarse stencil width of the first coarse node in the next subdomain. */
907: while ((startf + want) / ratio < nextc - stencil_width) want++;
908: /* Move the last fine node in the current subdomain to the left until the coarse node on its right is within one
909: * coarse stencil width of the last coarse node in the current subdomain. */
910: while ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width) want--;
911: /* Make sure all constraints are satisfied */
912: if (want < 0 || want > remaining || ((startf + want) / ratio < nextc - stencil_width) || ((startf + want - 1 + ratio - 1) / ratio > nextc - 1 + stencil_width))
913: SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible refined ownership range");
914: }
915: lf[i] = want;
916: startc += lc[i];
917: startf += lf[i];
918: remaining -= lf[i];
919: }
920: PetscFunctionReturn(PETSC_SUCCESS);
921: }
923: /*
924: Creates "balanced" ownership ranges after coarsening, constrained by the need for the
925: fine grid boundaries to fall within one stencil width of the coarse partition.
927: Uses a greedy algorithm to handle non-ideal layouts, could probably do something better.
928: */
929: static PetscErrorCode DMDACoarsenOwnershipRanges(DM da, PetscBool periodic, PetscInt stencil_width, PetscInt ratio, PetscInt m, const PetscInt lf[], PetscInt lc[])
930: {
931: PetscInt i, totalf, remaining, startc, startf;
933: PetscFunctionBegin;
934: PetscCheck(ratio >= 1, PetscObjectComm((PetscObject)da), PETSC_ERR_USER, "Requested refinement ratio %" PetscInt_FMT " must be at least 1", ratio);
935: if (ratio == 1) {
936: PetscCall(PetscArraycpy(lc, lf, m));
937: PetscFunctionReturn(PETSC_SUCCESS);
938: }
939: for (i = 0, totalf = 0; i < m; i++) totalf += lf[i];
940: remaining = (!periodic) + (totalf - (!periodic)) / ratio;
941: for (i = 0, startc = 0, startf = 0; i < m; i++) {
942: PetscInt want = remaining / (m - i) + !!(remaining % (m - i));
943: if (i == m - 1) lc[i] = want;
944: else {
945: const PetscInt nextf = startf + lf[i];
946: /* Slide first coarse node of next subdomain to the left until the coarse node to the left of the first fine
947: * node is within one stencil width. */
948: while (nextf / ratio < startc + want - stencil_width) want--;
949: /* Slide the last coarse node of the current subdomain to the right until the coarse node to the right of the last
950: * fine node is within one stencil width. */
951: while ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width) want++;
952: if (want < 0 || want > remaining || (nextf / ratio < startc + want - stencil_width) || ((nextf - 1 + ratio - 1) / ratio > startc + want - 1 + stencil_width))
953: SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Could not find a compatible coarsened ownership range");
954: }
955: lc[i] = want;
956: startc += lc[i];
957: startf += lf[i];
958: remaining -= lc[i];
959: }
960: PetscFunctionReturn(PETSC_SUCCESS);
961: }
963: PetscErrorCode DMRefine_DA(DM da, MPI_Comm comm, DM *daref)
964: {
965: PetscInt M, N, P, i, dim;
966: Vec coordsc, coordsf;
967: DM da2;
968: DM_DA *dd = (DM_DA *)da->data, *dd2;
970: PetscFunctionBegin;
974: PetscCall(DMGetDimension(da, &dim));
975: if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
976: M = dd->refine_x * dd->M;
977: } else {
978: M = 1 + dd->refine_x * (dd->M - 1);
979: }
980: if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
981: if (dim > 1) {
982: N = dd->refine_y * dd->N;
983: } else {
984: N = 1;
985: }
986: } else {
987: N = 1 + dd->refine_y * (dd->N - 1);
988: }
989: if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
990: if (dim > 2) {
991: P = dd->refine_z * dd->P;
992: } else {
993: P = 1;
994: }
995: } else {
996: P = 1 + dd->refine_z * (dd->P - 1);
997: }
998: PetscCall(DMDACreate(PetscObjectComm((PetscObject)da), &da2));
999: PetscCall(DMSetOptionsPrefix(da2, ((PetscObject)da)->prefix));
1000: PetscCall(DMSetDimension(da2, dim));
1001: PetscCall(DMDASetSizes(da2, M, N, P));
1002: PetscCall(DMDASetNumProcs(da2, dd->m, dd->n, dd->p));
1003: PetscCall(DMDASetBoundaryType(da2, dd->bx, dd->by, dd->bz));
1004: PetscCall(DMDASetDof(da2, dd->w));
1005: PetscCall(DMDASetStencilType(da2, dd->stencil_type));
1006: PetscCall(DMDASetStencilWidth(da2, dd->s));
1007: if (dim == 3) {
1008: PetscInt *lx, *ly, *lz;
1009: PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz));
1010: PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
1011: PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly));
1012: PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_z, dd->p, dd->lz, lz));
1013: PetscCall(DMDASetOwnershipRanges(da2, lx, ly, lz));
1014: PetscCall(PetscFree3(lx, ly, lz));
1015: } else if (dim == 2) {
1016: PetscInt *lx, *ly;
1017: PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly));
1018: PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
1019: PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_y, dd->n, dd->ly, ly));
1020: PetscCall(DMDASetOwnershipRanges(da2, lx, ly, NULL));
1021: PetscCall(PetscFree2(lx, ly));
1022: } else if (dim == 1) {
1023: PetscInt *lx;
1024: PetscCall(PetscMalloc1(dd->m, &lx));
1025: PetscCall(DMDARefineOwnershipRanges(da, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->refine_x, dd->m, dd->lx, lx));
1026: PetscCall(DMDASetOwnershipRanges(da2, lx, NULL, NULL));
1027: PetscCall(PetscFree(lx));
1028: }
1029: dd2 = (DM_DA *)da2->data;
1031: /* allow overloaded (user replaced) operations to be inherited by refinement clones */
1032: da2->ops->creatematrix = da->ops->creatematrix;
1033: /* da2->ops->createinterpolation = da->ops->createinterpolation; this causes problem with SNESVI */
1034: da2->ops->getcoloring = da->ops->getcoloring;
1035: dd2->interptype = dd->interptype;
1037: /* copy fill information if given */
1038: if (dd->dfill) {
1039: PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill));
1040: PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1));
1041: }
1042: if (dd->ofill) {
1043: PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill));
1044: PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1));
1045: }
1046: /* copy the refine information */
1047: dd2->coarsen_x = dd2->refine_x = dd->refine_x;
1048: dd2->coarsen_y = dd2->refine_y = dd->refine_y;
1049: dd2->coarsen_z = dd2->refine_z = dd->refine_z;
1051: if (dd->refine_z_hier) {
1052: if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_z_hier_n) dd2->refine_z = dd->refine_z_hier[da->levelup - da->leveldown + 1];
1053: if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_z_hier_n) dd2->coarsen_z = dd->refine_z_hier[da->levelup - da->leveldown];
1054: dd2->refine_z_hier_n = dd->refine_z_hier_n;
1055: PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier));
1056: PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n));
1057: }
1058: if (dd->refine_y_hier) {
1059: if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_y_hier_n) dd2->refine_y = dd->refine_y_hier[da->levelup - da->leveldown + 1];
1060: if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_y_hier_n) dd2->coarsen_y = dd->refine_y_hier[da->levelup - da->leveldown];
1061: dd2->refine_y_hier_n = dd->refine_y_hier_n;
1062: PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier));
1063: PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n));
1064: }
1065: if (dd->refine_x_hier) {
1066: if (da->levelup - da->leveldown + 1 > -1 && da->levelup - da->leveldown + 1 < dd->refine_x_hier_n) dd2->refine_x = dd->refine_x_hier[da->levelup - da->leveldown + 1];
1067: if (da->levelup - da->leveldown > -1 && da->levelup - da->leveldown < dd->refine_x_hier_n) dd2->coarsen_x = dd->refine_x_hier[da->levelup - da->leveldown];
1068: dd2->refine_x_hier_n = dd->refine_x_hier_n;
1069: PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier));
1070: PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n));
1071: }
1073: /* copy vector type information */
1074: PetscCall(DMSetVecType(da2, da->vectype));
1076: dd2->lf = dd->lf;
1077: dd2->lj = dd->lj;
1079: da2->leveldown = da->leveldown;
1080: da2->levelup = da->levelup + 1;
1082: PetscCall(DMSetUp(da2));
1084: /* interpolate coordinates if they are set on the coarse grid */
1085: PetscCall(DMGetCoordinates(da, &coordsc));
1086: if (coordsc) {
1087: DM cdaf, cdac;
1088: Mat II;
1090: PetscCall(DMGetCoordinateDM(da, &cdac));
1091: PetscCall(DMGetCoordinateDM(da2, &cdaf));
1092: /* force creation of the coordinate vector */
1093: PetscCall(DMDASetUniformCoordinates(da2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
1094: PetscCall(DMGetCoordinates(da2, &coordsf));
1095: PetscCall(DMCreateInterpolation(cdac, cdaf, &II, NULL));
1096: PetscCall(MatInterpolate(II, coordsc, coordsf));
1097: PetscCall(MatDestroy(&II));
1098: }
1100: for (i = 0; i < da->bs; i++) {
1101: const char *fieldname;
1102: PetscCall(DMDAGetFieldName(da, i, &fieldname));
1103: PetscCall(DMDASetFieldName(da2, i, fieldname));
1104: }
1106: *daref = da2;
1107: PetscFunctionReturn(PETSC_SUCCESS);
1108: }
1110: PetscErrorCode DMCoarsen_DA(DM dmf, MPI_Comm comm, DM *dmc)
1111: {
1112: PetscInt M, N, P, i, dim;
1113: Vec coordsc, coordsf;
1114: DM dmc2;
1115: DM_DA *dd = (DM_DA *)dmf->data, *dd2;
1117: PetscFunctionBegin;
1121: PetscCall(DMGetDimension(dmf, &dim));
1122: if (dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
1123: M = dd->M / dd->coarsen_x;
1124: } else {
1125: M = 1 + (dd->M - 1) / dd->coarsen_x;
1126: }
1127: if (dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
1128: if (dim > 1) {
1129: N = dd->N / dd->coarsen_y;
1130: } else {
1131: N = 1;
1132: }
1133: } else {
1134: N = 1 + (dd->N - 1) / dd->coarsen_y;
1135: }
1136: if (dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0) {
1137: if (dim > 2) {
1138: P = dd->P / dd->coarsen_z;
1139: } else {
1140: P = 1;
1141: }
1142: } else {
1143: P = 1 + (dd->P - 1) / dd->coarsen_z;
1144: }
1145: PetscCall(DMDACreate(PetscObjectComm((PetscObject)dmf), &dmc2));
1146: PetscCall(DMSetOptionsPrefix(dmc2, ((PetscObject)dmf)->prefix));
1147: PetscCall(DMSetDimension(dmc2, dim));
1148: PetscCall(DMDASetSizes(dmc2, M, N, P));
1149: PetscCall(DMDASetNumProcs(dmc2, dd->m, dd->n, dd->p));
1150: PetscCall(DMDASetBoundaryType(dmc2, dd->bx, dd->by, dd->bz));
1151: PetscCall(DMDASetDof(dmc2, dd->w));
1152: PetscCall(DMDASetStencilType(dmc2, dd->stencil_type));
1153: PetscCall(DMDASetStencilWidth(dmc2, dd->s));
1154: if (dim == 3) {
1155: PetscInt *lx, *ly, *lz;
1156: PetscCall(PetscMalloc3(dd->m, &lx, dd->n, &ly, dd->p, &lz));
1157: PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
1158: PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly));
1159: PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bz == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_z, dd->p, dd->lz, lz));
1160: PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, lz));
1161: PetscCall(PetscFree3(lx, ly, lz));
1162: } else if (dim == 2) {
1163: PetscInt *lx, *ly;
1164: PetscCall(PetscMalloc2(dd->m, &lx, dd->n, &ly));
1165: PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
1166: PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->by == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_y, dd->n, dd->ly, ly));
1167: PetscCall(DMDASetOwnershipRanges(dmc2, lx, ly, NULL));
1168: PetscCall(PetscFree2(lx, ly));
1169: } else if (dim == 1) {
1170: PetscInt *lx;
1171: PetscCall(PetscMalloc1(dd->m, &lx));
1172: PetscCall(DMDACoarsenOwnershipRanges(dmf, (PetscBool)(dd->bx == DM_BOUNDARY_PERIODIC || dd->interptype == DMDA_Q0), dd->s, dd->coarsen_x, dd->m, dd->lx, lx));
1173: PetscCall(DMDASetOwnershipRanges(dmc2, lx, NULL, NULL));
1174: PetscCall(PetscFree(lx));
1175: }
1176: dd2 = (DM_DA *)dmc2->data;
1178: /* allow overloaded (user replaced) operations to be inherited by the coarsened clones; why are only some inherited and not all? */
1179: /* dmc2->ops->createinterpolation = dmf->ops->createinterpolation; copying this one causes trouble for DMSetVI */
1180: dmc2->ops->creatematrix = dmf->ops->creatematrix;
1181: dmc2->ops->getcoloring = dmf->ops->getcoloring;
1182: dd2->interptype = dd->interptype;
1184: /* copy fill information if given */
1185: if (dd->dfill) {
1186: PetscCall(PetscMalloc1(dd->dfill[dd->w] + dd->w + 1, &dd2->dfill));
1187: PetscCall(PetscArraycpy(dd2->dfill, dd->dfill, dd->dfill[dd->w] + dd->w + 1));
1188: }
1189: if (dd->ofill) {
1190: PetscCall(PetscMalloc1(dd->ofill[dd->w] + dd->w + 1, &dd2->ofill));
1191: PetscCall(PetscArraycpy(dd2->ofill, dd->ofill, dd->ofill[dd->w] + dd->w + 1));
1192: }
1193: /* copy the refine information */
1194: dd2->coarsen_x = dd2->refine_x = dd->coarsen_x;
1195: dd2->coarsen_y = dd2->refine_y = dd->coarsen_y;
1196: dd2->coarsen_z = dd2->refine_z = dd->coarsen_z;
1198: if (dd->refine_z_hier) {
1199: if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_z_hier_n) dd2->refine_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 1];
1200: if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_z_hier_n) dd2->coarsen_z = dd->refine_z_hier[dmf->levelup - dmf->leveldown - 2];
1201: dd2->refine_z_hier_n = dd->refine_z_hier_n;
1202: PetscCall(PetscMalloc1(dd2->refine_z_hier_n, &dd2->refine_z_hier));
1203: PetscCall(PetscArraycpy(dd2->refine_z_hier, dd->refine_z_hier, dd2->refine_z_hier_n));
1204: }
1205: if (dd->refine_y_hier) {
1206: if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_y_hier_n) dd2->refine_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 1];
1207: if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_y_hier_n) dd2->coarsen_y = dd->refine_y_hier[dmf->levelup - dmf->leveldown - 2];
1208: dd2->refine_y_hier_n = dd->refine_y_hier_n;
1209: PetscCall(PetscMalloc1(dd2->refine_y_hier_n, &dd2->refine_y_hier));
1210: PetscCall(PetscArraycpy(dd2->refine_y_hier, dd->refine_y_hier, dd2->refine_y_hier_n));
1211: }
1212: if (dd->refine_x_hier) {
1213: if (dmf->levelup - dmf->leveldown - 1 > -1 && dmf->levelup - dmf->leveldown - 1 < dd->refine_x_hier_n) dd2->refine_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 1];
1214: if (dmf->levelup - dmf->leveldown - 2 > -1 && dmf->levelup - dmf->leveldown - 2 < dd->refine_x_hier_n) dd2->coarsen_x = dd->refine_x_hier[dmf->levelup - dmf->leveldown - 2];
1215: dd2->refine_x_hier_n = dd->refine_x_hier_n;
1216: PetscCall(PetscMalloc1(dd2->refine_x_hier_n, &dd2->refine_x_hier));
1217: PetscCall(PetscArraycpy(dd2->refine_x_hier, dd->refine_x_hier, dd2->refine_x_hier_n));
1218: }
1220: /* copy vector type information */
1221: PetscCall(DMSetVecType(dmc2, dmf->vectype));
1223: dd2->lf = dd->lf;
1224: dd2->lj = dd->lj;
1226: dmc2->leveldown = dmf->leveldown + 1;
1227: dmc2->levelup = dmf->levelup;
1229: PetscCall(DMSetUp(dmc2));
1231: /* inject coordinates if they are set on the fine grid */
1232: PetscCall(DMGetCoordinates(dmf, &coordsf));
1233: if (coordsf) {
1234: DM cdaf, cdac;
1235: Mat inject;
1236: VecScatter vscat;
1238: PetscCall(DMGetCoordinateDM(dmf, &cdaf));
1239: PetscCall(DMGetCoordinateDM(dmc2, &cdac));
1240: /* force creation of the coordinate vector */
1241: PetscCall(DMDASetUniformCoordinates(dmc2, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0));
1242: PetscCall(DMGetCoordinates(dmc2, &coordsc));
1244: PetscCall(DMCreateInjection(cdac, cdaf, &inject));
1245: PetscCall(MatScatterGetVecScatter(inject, &vscat));
1246: PetscCall(VecScatterBegin(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD));
1247: PetscCall(VecScatterEnd(vscat, coordsf, coordsc, INSERT_VALUES, SCATTER_FORWARD));
1248: PetscCall(MatDestroy(&inject));
1249: }
1251: for (i = 0; i < dmf->bs; i++) {
1252: const char *fieldname;
1253: PetscCall(DMDAGetFieldName(dmf, i, &fieldname));
1254: PetscCall(DMDASetFieldName(dmc2, i, fieldname));
1255: }
1257: *dmc = dmc2;
1258: PetscFunctionReturn(PETSC_SUCCESS);
1259: }
1261: PetscErrorCode DMRefineHierarchy_DA(DM da, PetscInt nlevels, DM daf[])
1262: {
1263: PetscInt i, n, *refx, *refy, *refz;
1265: PetscFunctionBegin;
1267: PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative");
1268: if (nlevels == 0) PetscFunctionReturn(PETSC_SUCCESS);
1271: /* Get refinement factors, defaults taken from the coarse DMDA */
1272: PetscCall(PetscMalloc3(nlevels, &refx, nlevels, &refy, nlevels, &refz));
1273: for (i = 0; i < nlevels; i++) PetscCall(DMDAGetRefinementFactor(da, &refx[i], &refy[i], &refz[i]));
1274: n = nlevels;
1275: PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_x", refx, &n, NULL));
1276: n = nlevels;
1277: PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_y", refy, &n, NULL));
1278: n = nlevels;
1279: PetscCall(PetscOptionsGetIntArray(((PetscObject)da)->options, ((PetscObject)da)->prefix, "-da_refine_hierarchy_z", refz, &n, NULL));
1281: PetscCall(DMDASetRefinementFactor(da, refx[0], refy[0], refz[0]));
1282: PetscCall(DMRefine(da, PetscObjectComm((PetscObject)da), &daf[0]));
1283: for (i = 1; i < nlevels; i++) {
1284: PetscCall(DMDASetRefinementFactor(daf[i - 1], refx[i], refy[i], refz[i]));
1285: PetscCall(DMRefine(daf[i - 1], PetscObjectComm((PetscObject)da), &daf[i]));
1286: }
1287: PetscCall(PetscFree3(refx, refy, refz));
1288: PetscFunctionReturn(PETSC_SUCCESS);
1289: }
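/* Editor's sketch (not part of da.c): building a 3-level refined hierarchy
   through the public entry point; per-level factors can be overridden with the
   -da_refine_hierarchy_{x,y,z} options read above. The level count is an
   illustrative assumption. */
static PetscErrorCode ExampleHierarchy(DM dm)
{
  DM fine[3];

  PetscFunctionBegin;
  PetscCall(DMRefineHierarchy(dm, 3, fine));
  /* ... use the hierarchy ... */
  for (PetscInt i = 0; i < 3; i++) PetscCall(DMDestroy(&fine[i]));
  PetscFunctionReturn(PETSC_SUCCESS);
}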
1291: PetscErrorCode DMCoarsenHierarchy_DA(DM da, PetscInt nlevels, DM dac[])
1292: {
1293: PetscInt i;
1295: PetscFunctionBegin;
1297: PetscCheck(nlevels >= 0, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_OUTOFRANGE, "nlevels cannot be negative");
1298: if (nlevels == 0) PetscFunctionReturn(PETSC_SUCCESS);
1300: PetscCall(DMCoarsen(da, PetscObjectComm((PetscObject)da), &dac[0]));
1301: for (i = 1; i < nlevels; i++) PetscCall(DMCoarsen(dac[i - 1], PetscObjectComm((PetscObject)da), &dac[i]));
1302: PetscFunctionReturn(PETSC_SUCCESS);
1303: }
1305: PetscErrorCode DMDASetGLLCoordinates_1d(DM dm, PetscInt n, PetscReal *nodes)
1306: {
1307: PetscInt i, j, xs, xn, q;
1308: PetscScalar *xx;
1309: PetscReal h;
1310: Vec x;
1311: DM_DA *da = (DM_DA *)dm->data;
1313: PetscFunctionBegin;
1314: if (da->bx != DM_BOUNDARY_PERIODIC) {
1315: PetscCall(DMDAGetInfo(dm, NULL, &q, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
1316: q = (q - 1) / (n - 1); /* number of spectral elements */
1317: h = 2.0 / q;
1318: PetscCall(DMDAGetCorners(dm, &xs, NULL, NULL, &xn, NULL, NULL));
1319: xs = xs / (n - 1);
1320: xn = xn / (n - 1);
1321: PetscCall(DMDASetUniformCoordinates(dm, -1., 1., 0., 0., 0., 0.));
1322: PetscCall(DMGetCoordinates(dm, &x));
1323: PetscCall(DMDAVecGetArray(dm, x, &xx));
1325: /* loop over local spectral elements */
1326: for (j = xs; j < xs + xn; j++) {
1327: /*
1328: Except for the first process, each process starts on the second GLL point of the first element on that process
1329: */
1330: for (i = (j == xs && xs > 0) ? 1 : 0; i < n; i++) xx[j * (n - 1) + i] = -1.0 + h * j + h * (nodes[i] + 1.0) / 2.;
1331: }
1332: PetscCall(DMDAVecRestoreArray(dm, x, &xx));
1333: } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for periodic");
1334: PetscFunctionReturn(PETSC_SUCCESS);
1335: }
1337: /*@
1339: DMDASetGLLCoordinates - Sets the global coordinates, running from -1 to 1, to the GLL points of as many GLL elements as fit the number of grid points
1341: Collective on da
1343: Input Parameters:
1344: + da - the `DMDA` object
1345: . n - the number of GLL nodes
1346: - nodes - the GLL nodes
1348: Level: advanced
1350: Note:
1351: The parallel decomposition of grid points must correspond to the degree of the GLL elements. That is, the number of grid points
1352: on each process must be divisible by the number of GLL elements needed per process. This depends on whether the `DM` is
1353: periodic or not.
1355: .seealso: `DM`, `DMDA`, `DMDACreate()`, `PetscDTGaussLobattoLegendreQuadrature()`, `DMGetCoordinates()`
1356: @*/
1357: PetscErrorCode DMDASetGLLCoordinates(DM da, PetscInt n, PetscReal *nodes)
1358: {
1359: PetscFunctionBegin;
1360: if (da->dim == 1) {
1361: PetscCall(DMDASetGLLCoordinates_1d(da, n, nodes));
1362: } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented for 2 or 3d");
1363: PetscFunctionReturn(PETSC_SUCCESS);
1364: }
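/* Editor's sketch (not part of da.c; assumes petscdt.h is included): computing
   GLL nodes with PetscDTGaussLobattoLegendreQuadrature() and handing them to
   DMDASetGLLCoordinates(). n = 4 nodes per element is an illustrative choice;
   the 1d DMDA must then have q*(n-1)+1 grid points for some integer q. */
static PetscErrorCode ExampleGLL(DM dm)
{
  const PetscInt n = 4;
  PetscReal     *nodes, *weights;

  PetscFunctionBegin;
  PetscCall(PetscMalloc2(n, &nodes, n, &weights));
  PetscCall(PetscDTGaussLobattoLegendreQuadrature(n, PETSCGAUSSLOBATTOLEGENDRE_VIA_LINEAR_ALGEBRA, nodes, weights));
  PetscCall(DMDASetGLLCoordinates(dm, n, nodes));
  PetscCall(PetscFree2(nodes, weights));
  PetscFunctionReturn(PETSC_SUCCESS);
}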
1366: PETSC_INTERN PetscErrorCode DMGetCompatibility_DA(DM da1, DM dm2, PetscBool *compatible, PetscBool *set)
1367: {
1368: DM_DA *dd1 = (DM_DA *)da1->data, *dd2;
1369: DM da2;
1370: DMType dmtype2;
1371: PetscBool isda, compatibleLocal;
1372: PetscInt i;
1374: PetscFunctionBegin;
1375: PetscCheck(da1->setupcalled, PetscObjectComm((PetscObject)da1), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on first DM before DMGetCompatibility()");
1376: PetscCall(DMGetType(dm2, &dmtype2));
1377: PetscCall(PetscStrcmp(dmtype2, DMDA, &isda));
1378: if (isda) {
1379: da2 = dm2;
1380: dd2 = (DM_DA *)da2->data;
1381: PetscCheck(da2->setupcalled, PetscObjectComm((PetscObject)da2), PETSC_ERR_ARG_WRONGSTATE, "DMSetUp() must be called on second DM before DMGetCompatibility()");
1382: compatibleLocal = (PetscBool)(da1->dim == da2->dim);
1383: if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->s == dd2->s)); /* Stencil width */
1384: /* Global size ranks Boundary type */
1385: if (compatibleLocal) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->M == dd2->M) && (dd1->m == dd2->m) && (dd1->bx == dd2->bx));
1386: if (compatibleLocal && da1->dim > 1) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->N == dd2->N) && (dd1->n == dd2->n) && (dd1->by == dd2->by));
1387: if (compatibleLocal && da1->dim > 2) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->P == dd2->P) && (dd1->p == dd2->p) && (dd1->bz == dd2->bz));
1388: if (compatibleLocal) {
1389: for (i = 0; i < dd1->m; ++i) { compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lx[i] == dd2->lx[i])); /* Local size */ }
1390: }
1391: if (compatibleLocal && da1->dim > 1) {
1392: for (i = 0; i < dd1->n; ++i) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->ly[i] == dd2->ly[i]));
1393: }
1394: if (compatibleLocal && da1->dim > 2) {
1395: for (i = 0; i < dd1->p; ++i) compatibleLocal = (PetscBool)(compatibleLocal && (dd1->lz[i] == dd2->lz[i]));
1396: }
1397: *compatible = compatibleLocal;
1398: *set = PETSC_TRUE;
1399: } else {
1400: /* Decline to determine compatibility with other DM types */
1401: *set = PETSC_FALSE;
1402: }
1403: PetscFunctionReturn(PETSC_SUCCESS);
1404: }
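/* Editor's sketch (not part of da.c): the public entry point DMGetCompatibility()
   dispatches to DMGetCompatibility_DA() above. Note that dof is not compared,
   so two DMDAs that differ only in dof report as compatible. */
static PetscErrorCode ExampleCheckCompatible(DM dm1, DM dm2)
{
  PetscBool compatible, set;

  PetscFunctionBegin;
  PetscCall(DMGetCompatibility(dm1, dm2, &compatible, &set));
  if (set && compatible) PetscCall(PetscPrintf(PetscObjectComm((PetscObject)dm1), "DMs are compatible\n"));
  PetscFunctionReturn(PETSC_SUCCESS);
}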