Bug Summary

File: dm/impls/plex/plexdistribute.c
Warning: line 196, column 10
Array access (via field 'flags') results in a null pointer dereference

Annotated Source Code

/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c

1#include <petsc/private/dmpleximpl.h> /*I "petscdmplex.h" I*/
2#include <petsc/private/dmlabelimpl.h> /*I "petscdmlabel.h" I*/
3
4/*@C
5 DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6
7 Input Parameters:
8+ dm - The DM object
9. user - The user callback, may be NULL (to clear the callback)
10- ctx - context for callback evaluation, may be NULL
11
12 Level: advanced
13
14 Notes:
15 The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16
17 Any setting here overrides other configuration of DMPlex adjacency determination.
18
19.seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20@*/
21PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22{
23 DM_Plex *mesh = (DM_Plex *)dm->data;
24
25 PetscFunctionBegin;
26 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27 mesh->useradjacency = user;
28 mesh->useradjacencyctx = ctx;
29 PetscFunctionReturn(0);
30}
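
A minimal sketch of a user callback matching the pointer type above, plus its registration (illustrative only, not part of the analyzed file; the callback body is a placeholder assumption that reports each point as adjacent only to itself, with *adjSize acting as capacity on entry and count on exit):

    #include <petscdmplex.h>

    /* Hypothetical callback: on entry *adjSize is the capacity of adj[],
       on exit it is the number of adjacent points written. */
    static PetscErrorCode MyAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[], void *ctx)
    {
      PetscFunctionBegin;
      if (*adjSize < 1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "adjacency array too small");
      adj[0]   = p;
      *adjSize = 1;
      PetscFunctionReturn(0);
    }

    /* ... after the DM is created ... */
    ierr = DMPlexSetAdjacencyUser(dm, MyAdjacency, NULL);CHKERRQ(ierr);
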
31
32/*@C
33 DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34
35 Input Parameter:
36. dm - The DM object
37
38 Output Parameters:
39+ user - The user callback
40- ctx - context for callback evaluation
41
42 Level: advanced
43
44.seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
45@*/
46PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47{
48 DM_Plex *mesh = (DM_Plex *)dm->data;
49
50 PetscFunctionBegin;
51 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52 if (user) *user = mesh->useradjacency;
53 if (ctx) *ctx = mesh->useradjacencyctx;
54 PetscFunctionReturn(0);
55}
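
The matching query, e.g. to check whether a callback currently overrides the built-in adjacency rules (a sketch under the same assumptions as above):

    PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*) = NULL;
    void *ctx = NULL;
    ierr = DMPlexGetAdjacencyUser(dm, &user, &ctx);CHKERRQ(ierr);
    if (user) { /* user adjacency active; ctx is the registered context */ }
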
56
57/*@
58 DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59
60 Input Parameters:
61+ dm - The DM object
62- useAnchors - Flag to use the constraints. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63
64 Level: intermediate
65
66.seealso: DMGetAdjacency(), DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
67@*/
68PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69{
70 DM_Plex *mesh = (DM_Plex *) dm->data;
71
72 PetscFunctionBegin;
73 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74 mesh->useAnchors = useAnchors;
75 PetscFunctionReturn(0);
76}
77
78/*@
79 DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80
81 Input Parameter:
82. dm - The DM object
83
84 Output Parameter:
85. useAnchors - Flag to use the constraints. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86
87 Level: intermediate
88
89.seealso: DMPlexSetAdjacencyUseAnchors(), DMSetAdjacency(), DMGetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
90@*/
91PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92{
93 DM_Plex *mesh = (DM_Plex *) dm->data;
94
95 PetscFunctionBegin;
96 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97 PetscValidIntPointer(useAnchors, 2);
98 *useAnchors = mesh->useAnchors;
99 PetscFunctionReturn(0);
100}
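
A short usage sketch for the anchor flag pair above (assumes anchors were previously installed with DMPlexSetAnchors()):

    PetscBool useAnchors;
    ierr = DMPlexSetAdjacencyUseAnchors(dm, PETSC_TRUE);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
    /* useAnchors == PETSC_TRUE: DMPlexGetAdjacency() now reports anchor
       points in place of constrained points */
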
101
102static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103{
104 const PetscInt *cone = NULL;
105 PetscInt numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106 PetscErrorCode ierr;
107
108 PetscFunctionBeginHot;
109 ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
110 ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
111 for (c = 0; c <= coneSize; ++c) {
112 const PetscInt point = !c ? p : cone[c-1];
113 const PetscInt *support = NULL;
114 PetscInt supportSize, s, q;
115
116 ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
117 ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
118 for (s = 0; s < supportSize; ++s) {
119 for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
120 if (support[s] == adj[q]) break;
121 }
122 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
123 }
124 }
125 *adjSize = numAdj;
126 PetscFunctionReturn(0);
127}
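
The inner loop at line 119 packs a membership test and a conditional append into one statement: while q < numAdj the body compares support[s] against adj[q] and breaks on a match; if the scan runs off the end, the short-circuit || evaluates the comma expression, which appends support[s] and yields 0, terminating the loop. An unpacked sketch of the same dedup-insert (the support and transitive variants below use the identical idiom):

    PetscBool found = PETSC_FALSE;
    for (q = 0; q < numAdj; ++q) {
      if (support[s] == adj[q]) {found = PETSC_TRUE; break;}
    }
    if (!found) adj[numAdj++] = support[s]; /* append only if not already present */
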
128
129static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
130{
131 const PetscInt *support = NULL;
132 PetscInt numAdj = 0, maxAdjSize = *adjSize, supportSize, s;
133 PetscErrorCode ierr;
134
135 PetscFunctionBeginHot;
136 ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
137 ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
138 for (s = 0; s <= supportSize; ++s) {
139 const PetscInt point = !s ? p : support[s-1];
140 const PetscInt *cone = NULL;
141 PetscInt coneSize, c, q;
142
143 ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
144 ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
145 for (c = 0; c < coneSize; ++c) {
146 for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
147 if (cone[c] == adj[q]) break;
148 }
149 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
150 }
151 }
152 *adjSize = numAdj;
153 PetscFunctionReturn(0);
154}
155
156static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
157{
158 PetscInt *star = NULL;
159 PetscInt numAdj = 0, maxAdjSize = *adjSize, starSize, s;
160 PetscErrorCode ierr;
161
162 PetscFunctionBeginHot;
163 ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
164 for (s = 0; s < starSize*2; s += 2) {
165 const PetscInt *closure = NULL;
166 PetscInt closureSize, c, q;
167
168 ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
169 for (c = 0; c < closureSize*2; c += 2) {
170 for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
171 if (closure[c] == adj[q]) break;
172 }
173 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
174 }
175 ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
176 }
177 ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
178 *adjSize = numAdj;
179 PetscFunctionReturn(0);
180}
181
182PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
183{
184 static PetscInt asiz = 0;
185 PetscInt maxAnchors = 1;
186 PetscInt aStart = -1, aEnd = -1;
187 PetscInt maxAdjSize;
188 PetscSection aSec = NULL;
189 IS aIS = NULL;
190 const PetscInt *anchors;
191 DM_Plex *mesh = (DM_Plex *)dm->data;
192 PetscErrorCode ierr;
193
194 PetscFunctionBeginHot;
195 if (useAnchors) {
196 ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
197 if (aSec) {
198 ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
199 maxAnchors = PetscMax(1,maxAnchors);
200 ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
201 ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
202 }
203 }
204 if (!*adj) {
205 PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
206
207 ierr = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
208 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
209 ierr = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
210 coneSeries = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
211 supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
212 asiz = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
213 asiz *= maxAnchors;
214 asiz = PetscMin(asiz,pEnd-pStart);
215 ierr = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
216 }
217 if (*adjSize < 0) *adjSize = asiz;
218 maxAdjSize = *adjSize;
219 if (mesh->useradjacency) {
220 ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
221 } else if (useTransitiveClosure) {
222 ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
223 } else if (useCone) {
224 ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
225 } else {
226 ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
227 }
228 if (useAnchors && aSec) {
229 PetscInt origSize = *adjSize;
230 PetscInt numAdj = origSize;
231 PetscInt i = 0, j;
232 PetscInt *orig = *adj;
233
234 while (i < origSize) {
235 PetscInt p = orig[i];
236 PetscInt aDof = 0;
237
238 if (p >= aStart && p < aEnd) {
239 ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
240 }
241 if (aDof) {
242 PetscInt aOff;
243 PetscInt s, q;
244
245 for (j = i + 1; j < numAdj; j++) {
246 orig[j - 1] = orig[j];
247 }
248 origSize--;
249 numAdj--;
250 ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
251 for (s = 0; s < aDof; ++s) {
252 for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
253 if (anchors[aOff+s] == orig[q]) break;
254 }
255 if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
256 }
257 }
258 else {
259 i++;
260 }
261 }
262 *adjSize = numAdj;
263 ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
264 }
265 PetscFunctionReturn(0);
266}
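
The allocation at lines 207-215 sizes the scratch array for the worst case of one adjacency sweep: coneSeries at line 210 is the geometric series 1 + maxC + ... + maxC^depth = (maxC^(depth+1) - 1)/(maxC - 1), the number of points in a full transitive closure with cone fan-out maxC (for example, depth = 2 and maxC = 3 give (3^3 - 1)/(3 - 1) = 13 = 1 + 3 + 9 points). Line 212 takes the larger of the star-then-closure and closure-then-star bounds, line 213 scales by maxAnchors for anchor substitution, and line 214 clips the result to the chart size pEnd-pStart, since there cannot be more adjacent points than points in the mesh.
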
267
268/*@
269 DMPlexGetAdjacency - Return all points adjacent to the given point
270
271 Input Parameters:
272+ dm - The DM object
273. p - The point
274. adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
275- adj - Either NULL so that the array is allocated, or an existing array with size adjSize
276
277 Output Parameters:
278+ adjSize - The number of adjacent points
279- adj - The adjacent points
280
281 Level: advanced
282
283 Notes:
284 The user must PetscFree the adj array if it was not passed in.
285
286.seealso: DMSetAdjacency(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
287@*/
288PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
289{
290 PetscBool useCone, useClosure, useAnchors;
291 PetscErrorCode ierr;
292
293 PetscFunctionBeginHot;
294 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
295 PetscValidPointer(adjSize,3);
296 PetscValidPointer(adj,4);
297 ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
298 ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
299 ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
300 PetscFunctionReturn(0);
301}
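
A minimal calling sketch following the Notes above (adj starts NULL so the routine allocates a large enough array; the caller frees it):

    PetscInt *adj = NULL, adjSize = PETSC_DETERMINE, a;
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (a = 0; a < adjSize; ++a) {
      /* adj[a] is adjacent to p under the DM's current adjacency rules */
    }
    ierr = PetscFree(adj);CHKERRQ(ierr);
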
302
303/*@
304 DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
305
306 Collective on dm
307
308 Input Parameters:
309+ dm - The DM
310- sfPoint - The PetscSF which encodes point connectivity
311
312 Output Parameters:
313+ processRanks - A list of process neighbors, or NULL
314- sfProcess - An SF encoding the two-sided process connectivity, or NULL
315
316 Level: developer
317
318.seealso: PetscSFCreate(), DMPlexCreateProcessSF()
319@*/
320PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
321{
322 const PetscSFNode *remotePoints;
323 PetscInt *localPointsNew;
324 PetscSFNode *remotePointsNew;
325 const PetscInt *nranks;
326 PetscInt *ranksNew;
327 PetscBT neighbors;
328 PetscInt pStart, pEnd, p, numLeaves, l, numNeighbors, n;
329 PetscMPIInt size, proc, rank;
330 PetscErrorCode ierr;
331
332 PetscFunctionBegin;
333 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
334 PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
335 if (processRanks) {PetscValidPointer(processRanks, 3);}
336 if (sfProcess) {PetscValidPointer(sfProcess, 4);}
337 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
338 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
339 ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
340 ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
341 ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
342 /* Compute root-to-leaf process connectivity */
343 ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
344 ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
345 for (p = pStart; p < pEnd; ++p) {
346 PetscInt ndof, noff, n;
347
348 ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
349 ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
350 for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
351 }
352 ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
353 /* Compute leaf-to-neighbor process connectivity */
354 ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
355 ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
356 for (p = pStart; p < pEnd; ++p) {
357 PetscInt ndof, noff, n;
358
359 ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
360 ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
361 for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
362 }
363 ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
364 /* Compute leaf-to-root process connectivity */
365 for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
366 /* Calculate edges */
367 PetscBTClear(neighbors, rank);
368 for(proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
369 ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
370 ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
371 ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
372 for(proc = 0, n = 0; proc < size; ++proc) {
373 if (PetscBTLookup(neighbors, proc)) {
374 ranksNew[n] = proc;
375 localPointsNew[n] = proc;
376 remotePointsNew[n].index = rank;
377 remotePointsNew[n].rank = proc;
378 ++n;
379 }
380 }
381 ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
382 if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
383 else {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
384 if (sfProcess) {
385 ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
386 ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
387 ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
388 ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
389 }
390 PetscFunctionReturn(0);
391}
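
The neighbor set in this routine is a PetscBT bit array of length size: every rank reachable through the root sections, the leaf sections, or the SF leaves is marked, the local rank is cleared at line 367, and the surviving bits are counted and compacted into the edge arrays. A stand-alone sketch of that bitset idiom (variable names are illustrative):

    PetscBT        seen;
    PetscInt       nset = 0, r;
    PetscErrorCode ierr;
    ierr = PetscBTCreate(size, &seen);CHKERRQ(ierr);
    ierr = PetscBTMemzero(size, seen);CHKERRQ(ierr);   /* clear all bits, as at line 341 */
    ierr = PetscBTSet(seen, 3);CHKERRQ(ierr);          /* mark rank 3 */
    for (r = 0; r < size; ++r) if (PetscBTLookup(seen, r)) ++nset;
    ierr = PetscBTDestroy(&seen);CHKERRQ(ierr);
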
392
393/*@
394 DMPlexDistributeOwnership - Compute owner information for shared points. This essentially makes the point SF two-sided, so each side knows the ranks of the processes with which it shares points.
395
396 Collective on dm
397
398 Input Parameter:
399. dm - The DM
400
401 Output Parameters:
402+ rootSection - The number of leaves for a given root point
403. rootrank - The rank of each edge into the root point
404. leafSection - The number of processes sharing a given leaf point
405- leafrank - The rank of each process sharing a leaf point
406
407 Level: developer
408
409.seealso: DMPlexCreateOverlap()
410@*/
411PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
412{
413 MPI_Comm comm;
414 PetscSF sfPoint;
415 const PetscInt *rootdegree;
416 PetscInt *myrank, *remoterank;
417 PetscInt pStart, pEnd, p, nedges;
418 PetscMPIInt rank;
419 PetscErrorCode ierr;
420
421 PetscFunctionBegin;
422 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
423 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
424 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
425 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
426 /* Compute number of leaves for each root */
427 ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
428 ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
429 ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
430 ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
431 for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
432 ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
433 /* Gather rank of each leaf to root */
434 ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
435 ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
436 ierr = PetscMalloc1(nedges, &remoterank);CHKERRQ(ierr);
437 for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
438 ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
439 ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
440 ierr = PetscFree(myrank);CHKERRQ(ierr);
441 ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
442 /* Distribute remote ranks to leaves */
443 ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
444 ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
445 PetscFunctionReturn(0);
446}
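
A sketch of the intended call sequence (mirroring how the outputs feed DMPlexCreateOverlap() below; the section creation and cleanup are assumptions of this example):

    PetscSection rootSection, leafSection;
    IS           rootrank, leafrank;
    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &rootSection);CHKERRQ(ierr);
    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm), &leafSection);CHKERRQ(ierr);
    ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
    /* e.g. DMPlexCreateOverlap(dm, 1, rootSection, rootrank, leafSection, leafrank, &ovLabel); */
    ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
    ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
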
447
448/*@C
449 DMPlexCreateOverlap - Compute the overlap of the local mesh, i.e. label the points to be sent to neighboring processes, using the ownership information from DMPlexDistributeOwnership().
450
451 Collective on dm
452
453 Input Parameters:
454+ dm - The DM
455. levels - Number of overlap levels
456. rootSection - The number of leaves for a given root point
457. rootrank - The rank of each edge into the root point
458. leafSection - The number of processes sharing a given leaf point
459- leafrank - The rank of each process sharing a leaf point
460
461 Output Parameter:
462. ovLabel - DMLabel containing remote overlap contributions as point/rank pairings
463
464 Level: developer
465
466.seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
467@*/
468PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
469{
470 MPI_Comm comm;
471 DMLabel ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
472 PetscSF sfPoint;
473 const PetscSFNode *remote;
474 const PetscInt *local;
475 const PetscInt *nrank, *rrank;
476 PetscInt *adj = NULL;
477 PetscInt pStart, pEnd, p, sStart, sEnd, nleaves, l;
478 PetscMPIInt rank, size;
479 PetscBool flg;
480 PetscErrorCode ierr;
481
482 PetscFunctionBegin;
483 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
484 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
485 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
486 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),486,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
487 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),487,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
488 ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),488,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
489 ierr = PetscSFGetGraph(sfPoint, NULL((void*)0), &nleaves, &local, &remote);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),489,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
490 ierr = DMLabelCreate(PETSC_COMM_SELF((MPI_Comm)0x44000001), "Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),490,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
491 /* Handle leaves: shared with the root point */
492 for (l = 0; l < nleaves; ++l) {
493 PetscInt adjSize = PETSC_DETERMINE-1, a;
494
495 ierr = DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),495,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
496 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),496,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
497 }
  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Handle roots */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;

    if ((p >= sStart) && (p < sEnd)) {
      /* Some leaves share a root with other leaves on different processes */
      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
      if (neighbors) {
        ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr);
        ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
        for (n = 0; n < neighbors; ++n) {
          const PetscInt remoteRank = nrank[noff+n];

          if (remoteRank == rank) continue;
          for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
        }
      }
    }
    /* Roots are shared with leaves */
    ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr);
    if (!neighbors) continue;
    ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr);
    ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
    for (n = 0; n < neighbors; ++n) {
      const PetscInt remoteRank = rrank[noff+n];

      if (remoteRank == rank) continue;
      for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(adj);CHKERRQ(ierr);
  ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr);
  ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr);
  /* Add additional overlap levels */
  for (l = 1; l < levels; l++) {
    /* Propagate point donations over SF to capture remote connections */
    ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr);
    /* Add next level of point donations to the label */
    ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr);
  }
  /* We require the closure in the overlap */
  ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr);
  if (flg) {
    PetscViewer viewer;
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject) dm), &viewer);CHKERRQ(ierr);
    ierr = DMLabelView(ovAdjByRank, viewer);CHKERRQ(ierr);
  }
  /* Invert sender to receiver label */
  ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap label", ovLabel);CHKERRQ(ierr);
  ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL, *ovLabel);CHKERRQ(ierr);
  /* Add owned points, except for shared local points */
  for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr);}
  for (l = 0; l < nleaves; ++l) {
    /* Guard against a NULL leaf array (contiguous leaf storage), as in the adjacency loop above */
    ierr = DMLabelClearValue(*ovLabel, local ? local[l] : l, rank);CHKERRQ(ierr);
    ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr);
  }
  /* Clean up */
  ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

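/* Editorial example (a minimal sketch, not in the original source): driving the
   overlap computation by hand. The helper name is hypothetical; it relies only
   on DMPlexDistributeOwnership() and DMPlexCreateOverlap() as defined in this file. */
static PetscErrorCode ExampleCreateOverlap_Private(DM dm, PetscInt levels, DMLabel *ovLabel)
{
  PetscSection   rootSection, leafSection;
  IS             rootrank, leafrank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &leafSection);CHKERRQ(ierr);
  /* Two-sided ownership information: which ranks share each root/leaf point */
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlap(dm, levels, rootSection, rootrank, leafSection, leafrank, ovLabel);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
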
/*@C
  DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution that realizes the overlap described by the input SF

  Collective on dm

  Input Parameters:
+ dm - The DM
- overlapSF - The SF mapping ghost points in overlap to owner points on other processes

  Output Parameter:
. migrationSF - An SF that maps original points in old locations to points in new locations

  Level: developer

.seealso: DMPlexCreateOverlap(), DMPlexDistribute()
@*/
PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscSFNode       *iremote;
  PetscSF            pointSF;
  const PetscInt    *sharedLocal;
  const PetscSFNode *overlapRemote, *sharedRemote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL, &overlapRemote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
  }
  for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(overlapSF, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);

  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) depthRecv[d]=0;
  for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
  depthShift[dim] = 0;
  for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
  for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
  for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
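  /* Editorial note (added): depthShift[d] accumulates the number of received
     points in the strata that precede stratum d in the chart ordering (cells,
     vertices, faces, edges). For dim = 2 with r_d received points of depth d,
     depthShift = {r_2, r_2 + r_0, 0}: incoming cells are appended after local
     cells, then vertices, then edges. */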
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    depthIdx[d] = pStart + depthShift[d];
  }

  /* Build an SF that describes the full overlap migration: local points plus received overlap points */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  newLeaves = pEnd - pStart + nleaves;
  ierr = PetscMalloc1(newLeaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(newLeaves, &iremote);CHKERRQ(ierr);
  /* First map local points to themselves */
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=pStart; p<pEnd; p++) {
      point = p + depthShift[d];
      ilocal[point] = point;
      iremote[point].index = p;
      iremote[point].rank  = rank;
      depthIdx[d]++;
    }
  }

  /* Add in the remote roots for currently shared points */
  ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(pointSF, NULL, &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr);
  for (d=0; d<dim+1; d++) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p=0; p<numSharedPoints; p++) {
      if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
        point = sharedLocal[p] + depthShift[d];
        iremote[point].index = sharedRemote[p].index;
        iremote[point].rank  = sharedRemote[p].rank;
      }
    }
  }

  /* Now add the incoming overlap points */
  for (p=0; p<nleaves; p++) {
    point = depthIdx[remoteDepths[p]];
    ilocal[point] = point;
    iremote[point].index = overlapRemote[p].index;
    iremote[point].rank  = overlapRemote[p].rank;
    depthIdx[remoteDepths[p]]++;
  }
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);

  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr);
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);

  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

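/* Editorial example (a minimal sketch, not in the original source): from an
   overlap label to a migration SF. The helper name is hypothetical;
   DMPlexPartitionLabelCreateSF() is assumed to have the signature used here. */
static PetscErrorCode ExampleOverlapMigrationSF_Private(DM dm, DMLabel ovLabel, PetscSF *migrationSF)
{
  PetscSF        overlapSF;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexPartitionLabelCreateSF(dm, ovLabel, &overlapSF);CHKERRQ(ierr);
  ierr = DMPlexCreateOverlapMigrationSF(dm, overlapSF, migrationSF);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&overlapSF);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
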
/*@
  DMPlexStratifyMigrationSF - Rearrange the leaves of a migration SF for stratification.

  Input Parameters:
+ dm - The DM
- sf - A star forest with non-ordered leaves, usually defining a DM point migration

  Output Parameter:
. migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified

  Level: developer

.seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
@*/
PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
{
  MPI_Comm           comm;
  PetscMPIInt        rank, size;
  PetscInt           d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
  PetscInt          *pointDepths, *remoteDepths, *ilocal;
  PetscInt          *depthRecv, *depthShift, *depthIdx;
  PetscInt           hybEnd[4];
  const PetscSFNode *iremote;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
  if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth);
  ierr = PetscLogEventBegin(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);

  /* Before building the migration SF we need to know the new stratum offsets */
  ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL, &iremote);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths);CHKERRQ(ierr);
  ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[PetscMax(depth-1,0)],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr);
  for (d = 0; d < depth+1; ++d) {
    ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
        pointDepths[p] = 2 * d;
      } else {
        pointDepths[p] = 2 * d + 1;
      }
    }
  }
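  /* Editorial note (added): depths are encoded as 2*d for hybrid points and
     2*d+1 for regular points, so each depth stratum splits into substrata that
     the shift computation below can place independently. */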
  for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
  ierr = PetscSFBcastBegin(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, pointDepths, remoteDepths);CHKERRQ(ierr);
  /* Count received points in each stratum and compute the internal strata shift */
  ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx);CHKERRQ(ierr);
  for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
  for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
  depthShift[2*depth+1] = 0;
  for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
  for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
  depthShift[0] += depthRecv[1];
  for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
  for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
  for (d = 2 * depth-1; d > 2; --d) {
    PetscInt e;

    for (e = d - 1; e > 1; --e) depthShift[e] += depthRecv[d];
  }
  for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
  /* Derive a new local permutation based on stratified indices */
  ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
  for (p = 0; p < nleaves; ++p) {
    const PetscInt dep = remoteDepths[p];

    ilocal[p] = depthShift[dep] + depthIdx[dep];
    depthIdx[dep]++;
  }
  ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree2(pointDepths,remoteDepths);CHKERRQ(ierr);
  ierr = PetscFree3(depthRecv, depthShift, depthIdx);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_PartStratSF,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

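/* Editorial example (a minimal sketch, not in the original source): stratify a
   migration SF and view the result on the DM's communicator. */
static PetscErrorCode ExampleStratifyMigrationSF_Private(DM dm, PetscSF migrationSF)
{
  PetscSF        stratSF;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexStratifyMigrationSF(dm, migrationSF, &stratSF);CHKERRQ(ierr);
  ierr = PetscSFView(stratSF, PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject) dm)));CHKERRQ(ierr);
  ierr = PetscSFDestroy(&stratSF);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
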
/*@
  DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on dm

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalVec - The existing data in a local vector

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newVec - The new data in a local vector

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscScalar   *originalValues, *newValues;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);

  ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

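/* Editorial example (a minimal sketch, not in the original source): the caller
   creates an empty newSection and newVec; DMPlexDistributeField() fills the
   section and sets the vector's size and type itself, as seen above. */
static PetscErrorCode ExampleDistributeField_Private(DM dm, PetscSF migrationSF, PetscSection origSection, Vec origVec, PetscSection *newSection, Vec *newVec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), newSection);CHKERRQ(ierr);
  ierr = VecCreate(PetscObjectComm((PetscObject) dm), newVec);CHKERRQ(ierr);
  ierr = DMPlexDistributeField(dm, migrationSF, origSection, origVec, *newSection, *newVec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
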
/*@
  DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on dm

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalIS - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newIS - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
{
  PetscSF         fieldSF;
  PetscInt       *newValues, *remoteOffsets, fieldSize;
  const PetscInt *originalValues;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = PetscMalloc1(fieldSize, &newValues);CHKERRQ(ierr);

  ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, (PetscInt *) originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

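/* Editorial example (a minimal sketch, not in the original source): only
   newSection must be pre-created; the routine allocates the output IS itself
   via ISCreateGeneral(), as seen above. */
static PetscErrorCode ExampleDistributeFieldIS_Private(DM dm, PetscSF migrationSF, PetscSection origSection, IS origIS, PetscSection *newSection, IS *newIS)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), newSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeFieldIS(dm, migrationSF, origSection, origIS, *newSection, newIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
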
/*@
  DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on dm

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
. datatype - The type of data
- originalData - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newData - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField()
@*/
PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscMPIInt    dataSize;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
  ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);

  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

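/* Editorial example (a minimal sketch, not in the original source):
   distributing raw PetscInt data; newData is allocated by the routine with
   PetscMalloc(), so the caller is responsible for freeing it. */
static PetscErrorCode ExampleDistributeData_Private(DM dm, PetscSF migrationSF, PetscSection origSection, PetscInt *origData, PetscSection *newSection, PetscInt **newData)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), newSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeData(dm, migrationSF, origSection, MPIU_INT, (void *) origData, *newSection, (void **) newData);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
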
static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
{
  DM_Plex        *pmesh = (DM_Plex*) (dmParallel)->data;
  MPI_Comm        comm;
  PetscSF         coneSF;
  PetscSection    originalConeSection, newConeSection;
  PetscInt       *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
  PetscBool       flg;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
907 ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeCones].active) ? (*PetscLogPLB)((DMPLEX_DistributeCones
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),907,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
908 /* Distribute cone section */
909 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),909,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
910 ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),910,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
911 ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),911,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
912 ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),912,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
913 ierr = DMSetUp(dmParallel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),913,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
914 {
915 PetscInt pStart, pEnd, p;
916
917 ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),917,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
918 for (p = pStart; p < pEnd; ++p) {
919 PetscInt coneSize;
920 ierr = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),920,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
921 pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize)(((pmesh->maxConeSize)<(coneSize)) ? (coneSize) : (pmesh
->maxConeSize))
;
922 }
923 }
924 /* Communicate and renumber cones */
925 ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),925,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
926 ierr = PetscFree(remoteOffsets)((*PetscTrFree)((void*)(remoteOffsets),926,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
) || ((remoteOffsets) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),926,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
927 ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),927,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
928 if (original) {
929 PetscInt numCones;
930
931 ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),931,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
932 ierr = PetscMalloc1(numCones,&globCones)PetscMallocA(1,PETSC_FALSE,932,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,(size_t)(numCones)*sizeof(**(&globCones)),(&globCones
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),932,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
933 ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),933,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
934 } else {
935 globCones = cones;
936 }
937 ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),937,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
938 ierr = PetscSFBcastBegin(coneSF, MPIU_INT((MPI_Datatype)0x4c000405), globCones, newCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),938,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
939 ierr = PetscSFBcastEnd(coneSF, MPIU_INT((MPI_Datatype)0x4c000405), globCones, newCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),939,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
940 if (original) {
941 ierr = PetscFree(globCones)((*PetscTrFree)((void*)(globCones),941,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
) || ((globCones) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),941,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
942 }
943 ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),943,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
944 ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL((void*)0), newCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),944,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
945#if defined(PETSC_USE_DEBUG1)
946 {
947 PetscInt p;
948 PetscBool valid = PETSC_TRUE;
949 for (p = 0; p < newConesSize; ++p) {
950 if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF((MPI_Comm)0x44000001), "[%d] Point %D not in overlap SF\n", PetscGlobalRank,p);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),950,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
951 }
952 if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map")return PetscError(((MPI_Comm)0x44000001),952,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,63,PETSC_ERROR_INITIAL,"Invalid global to local map")
;
953 }
954#endif
955 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),955,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
956 if (flg) {
957 ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),957,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
958 ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),958,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
959 ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),959,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
960 ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),960,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
961 ierr = PetscSFView(coneSF, NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),961,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
962 }
963 ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),963,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
964 ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),964,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
965 ierr = PetscSFBcastBegin(coneSF, MPIU_INT((MPI_Datatype)0x4c000405), cones, newCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),965,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
966 ierr = PetscSFBcastEnd(coneSF, MPIU_INT((MPI_Datatype)0x4c000405), cones, newCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),966,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
967 ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),967,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
968 ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeCones].active) ? (*PetscLogPLE)((DMPLEX_DistributeCones
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),968,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
969 /* Create supports and stratify DMPlex */
970 {
971 PetscInt pStart, pEnd;
972
973 ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),973,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
974 ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),974,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
975 }
976 ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),976,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
977 ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),977,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
978 {
979 PetscBool useCone, useClosure, useAnchors;
980
981 ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),981,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
982 ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),982,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
983 ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),983,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
984 ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),984,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
985 }
986 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
987}
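/* Editor's note: a sketch of the renumbering round-trip used above; not
   original source. Cone entries are point numbers and cannot be shipped
   verbatim, so the sender lifts them into the shared global numbering and
   the receiver pulls them back into its own local numbering. All names
   below are hypothetical: */
PETSC_UNUSED static PetscErrorCode ExampleRenumberPoints(ISLocalToGlobalMapping l2g, ISLocalToGlobalMapping g2l, PetscInt n, PetscInt pts[])
{
  PetscInt      *globalPts;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscMalloc1(n, &globalPts);CHKERRQ(ierr);
  /* sender side: local point numbers -> shared global numbers */
  ierr = ISLocalToGlobalMappingApplyBlock(l2g, n, pts, globalPts);CHKERRQ(ierr);
  /* (in DMPlexDistributeCones, globalPts travels over coneSF at this point) */
  /* receiver side: global -> new local numbers; IS_GTOLM_MASK writes -1 for
     points absent from the mapping instead of erroring, which is exactly
     what the PETSC_USE_DEBUG block above scans for */
  ierr = ISGlobalToLocalMappingApplyBlock(g2l, IS_GTOLM_MASK, n, globalPts, NULL, pts);CHKERRQ(ierr);
  ierr = PetscFree(globalPts);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}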
988
989static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
990{
991  MPI_Comm         comm;
992  PetscSection     originalCoordSection, newCoordSection;
993  Vec              originalCoordinates, newCoordinates;
994  PetscInt         bs;
995  PetscBool        isper;
996  const char      *name;
997  const PetscReal *maxCell, *L;
998  const DMBoundaryType *bd;
999  PetscErrorCode   ierr;
1000
1001  PetscFunctionBegin;
1002  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1003  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1004
1005  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1006  ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
1007  ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr);
1008  ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
1009  if (originalCoordinates) {
1010    ierr = VecCreate(PETSC_COMM_SELF, &newCoordinates);CHKERRQ(ierr);
1011    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
1012    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);
1013
1014    ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
1015    ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr);
1016    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
1017    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
1018    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
1019  }
1020  ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr);
1021  ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr);
1022  PetscFunctionReturn(0);
1023}
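/* Editor's note, not original source: coordinates are just one Section/Vec
   pair, and any nodal field laid out by a PetscSection migrates with the
   same single call used above,

     ierr = DMPlexDistributeField(dm, migrationSF, oldSection, oldVec,
                                  newSection, newVec);CHKERRQ(ierr);

   (oldSection/oldVec/newSection/newVec are placeholder names). The output
   vector must exist beforehand; here it is created with VecCreate on
   PETSC_COMM_SELF because local coordinate vectors are sequential, and
   DMPlexDistributeField sizes it from the distributed section. */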
1024
1025static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1026{
1027  DM_Plex         *mesh = (DM_Plex*) dm->data;
1028  MPI_Comm         comm;
1029  DMLabel          depthLabel;
1030  PetscMPIInt      rank;
1031  PetscInt         depth, d, numLabels, numLocalLabels, l;
1032  PetscBool        hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1033  PetscObjectState depthState = -1;
1034  PetscErrorCode   ierr;
1035
1036  PetscFunctionBegin;
1037  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1038  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1039
1040  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1041  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1042  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1043
1044  /* If the user has changed the depth label, communicate it instead */
1045  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1046  ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr);
1047  if (depthLabel) {ierr = PetscObjectStateGet((PetscObject) depthLabel, &depthState);CHKERRQ(ierr);}
1048  lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1049  ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm);CHKERRQ(ierr);
1050  if (sendDepth) {
1051    ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr);
1052    ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr);
1053  }
1054  /* Everyone must have either the same number of labels, or none */
1055  ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr);
1056  numLabels = numLocalLabels;
1057  ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
1058  if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1059  for (l = numLabels-1; l >= 0; --l) {
1060    DMLabel     label = NULL, labelNew = NULL;
1061    PetscBool   isDepth, lisOutput = PETSC_TRUE, isOutput;
1062    const char *name = NULL;
1063
1064    if (hasLabels) {
1065      ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
1066      /* Skip "depth" because it is recreated */
1067      ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr);
1068      ierr = PetscStrcmp(name, "depth", &isDepth);CHKERRQ(ierr);
1069    }
1070    ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
1071    if (isDepth && !sendDepth) continue;
1072    ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
1073    if (isDepth) {
1074      /* Put in any missing strata which can occur if users are managing the depth label themselves */
1075      PetscInt gdepth;
1076
1077      ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm);CHKERRQ(ierr);
1078      if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth);
1079      for (d = 0; d <= gdepth; ++d) {
1080        PetscBool has;
1081
1082        ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr);
1083        if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr);}
1084      }
1085    }
1086    ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
1087    /* Put the output flag in the new label */
1088    if (hasLabels) {ierr = DMGetLabelOutput(dm, name, &lisOutput);CHKERRQ(ierr);}
1089    ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm);CHKERRQ(ierr);
1090    ierr = PetscObjectGetName((PetscObject) labelNew, &name);CHKERRQ(ierr);
1091    ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr);
1092  }
1093  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
1094  PetscFunctionReturn(0);
1095}
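/* Editor's note: an illustrative sketch, not original source. Stripped of
   the depth-label special casing and the collective bookkeeping above,
   migrating a single named label reduces to three calls (ownership of
   labelNew passes to the target DM, matching the usage above; the function
   name is hypothetical): */
PETSC_UNUSED static PetscErrorCode ExampleDistributeOneLabel(DM dm, const char name[], PetscSF migrationSF, DM dmParallel)
{
  DMLabel        label, labelNew;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetLabel(dm, name, &label);CHKERRQ(ierr);
  ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr);
  ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}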
1096
1097static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1098{
1099  DM_Plex       *mesh  = (DM_Plex*) dm->data;
1100  DM_Plex       *pmesh = (DM_Plex*) (dmParallel)->data;
1101  PetscBool     *isHybrid, *isHybridParallel;
1102  PetscInt       dim, depth, d;
1103  PetscInt       pStart, pEnd, pStartP, pEndP;
1104  PetscErrorCode ierr;
1105
1106  PetscFunctionBegin;
1107  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1108  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1109
1110  ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
1111  ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
1112  ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr);
1113  ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr);
1114  ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel);CHKERRQ(ierr);
1115  for (d = 0; d <= depth; d++) {
1116    PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];
1117
1118    if (hybridMax >= 0) {
1119      PetscInt sStart, sEnd, p;
1120
1121      ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr);
1122      for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
1123    }
1124  }
1125  ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1126  ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr);
1127  for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
1128  for (d = 0; d <= depth; d++) {
1129    PetscInt sStart, sEnd, p, dd;
1130
1131    ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr);
1132    dd = (depth == 1 && d == 1) ? dim : d;
1133    for (p = sStart; p < sEnd; p++) {
1134      if (isHybridParallel[p-pStartP]) {
1135        pmesh->hybridPointMax[dd] = p;
1136        break;
1137      }
1138    }
1139  }
1140  ierr = PetscFree2(isHybrid,isHybridParallel);CHKERRQ(ierr);
1141  PetscFunctionReturn(0);
1142}
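/* Editor's note, not original source: hybridPointMax[d] records the first
   hybrid point of stratum d (hybrid points occupy the tail [hybridMax, sEnd)
   of their stratum), with -1 meaning "no hybrid points". Broadcasting a
   per-point PetscBool mask and taking the first PETSC_TRUE point of each
   stratum, as done above, therefore reproduces the bounds on the distributed
   mesh. Assuming the DMPlexGetHybridBounds query of this PETSc vintage, a
   consumer recovers them as:

     PetscInt cMax, fMax, eMax, vMax;
     ierr = DMPlexGetHybridBounds(dm, &cMax, &fMax, &eMax, &vMax);CHKERRQ(ierr);
*/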
1143
1144static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1145{
1146  DM_Plex       *mesh  = (DM_Plex*) dm->data;
1147  DM_Plex       *pmesh = (DM_Plex*) (dmParallel)->data;
1148  MPI_Comm       comm;
1149  DM             refTree;
1150  PetscSection   origParentSection, newParentSection;
1151  PetscInt      *origParents, *origChildIDs;
1152  PetscBool      flg;
1153  PetscErrorCode ierr;
1154
1155  PetscFunctionBegin;
1156  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1157  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5);
1158  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1159
1160  /* Set up tree */
1161  ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr);
1162  ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr);
1163  ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL,NULL);CHKERRQ(ierr);
1164  if (origParentSection) {
1165    PetscInt  pStart, pEnd;
1166    PetscInt *newParents, *newChildIDs, *globParents;
1167    PetscInt *remoteOffsetsParents, newParentSize;
1168    PetscSF   parentSF;
1169
1170    ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
1171    ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr);
1172    ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr);
1173    ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr);
1174    ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr);
1175    ierr = PetscFree(remoteOffsetsParents);CHKERRQ(ierr);
1176    ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr);
1177    ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs);CHKERRQ(ierr);
1178    if (original) {
1179      PetscInt numParents;
1180
1181      ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr);
1182      ierr = PetscMalloc1(numParents,&globParents);CHKERRQ(ierr);
1183      ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr);
1184    }
1185    else {
1186      globParents = origParents;
1187    }
1188    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1189    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, globParents, newParents);CHKERRQ(ierr);
1190    if (original) {
1191      ierr = PetscFree(globParents);CHKERRQ(ierr);
1192    }
1193    ierr = PetscSFBcastBegin(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1194    ierr = PetscSFBcastEnd(parentSF, MPIU_INT, origChildIDs, newChildIDs);CHKERRQ(ierr);
1195    ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL, newParents);CHKERRQ(ierr);
1196#if defined(PETSC_USE_DEBUG)
1197    {
1198      PetscInt  p;
1199      PetscBool valid = PETSC_TRUE;
1200      for (p = 0; p < newParentSize; ++p) {
1201        if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF, "Point %d not in overlap SF\n", p);CHKERRQ(ierr);}
1202      }
1203      if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map");
1204    }
1205#endif
1206    ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr);
1207    if (flg) {
1208      ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr);
1209      ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1210      ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr);
1211      ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1212      ierr = PetscSFView(parentSF, NULL);CHKERRQ(ierr);
1213    }
1214    ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr);
1215    ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr);
1216    ierr = PetscFree2(newParents,newChildIDs);CHKERRQ(ierr);
1217    ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr);
1218  }
1219  pmesh->useAnchors = mesh->useAnchors;
1220  PetscFunctionReturn(0);
1221}
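/* Editor's note: illustrative, not original source. The tree (used for
   non-conforming meshes) stores a parent point and a child ID per
   constrained point, laid out by a PetscSection, so it migrates exactly
   like the cones: distribute the section, broadcast parents (as global
   numbers) and child IDs over the section SF, then renumber the parents.
   A minimal consumer-side check, assuming the usual PETSc convention that
   unwanted outputs of DMPlexGetTree may be passed as NULL: */
PETSC_UNUSED static PetscErrorCode ExampleViewTreeLayout(DM dmDist, PetscViewer viewer)
{
  PetscSection   parentSection;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetTree(dmDist, &parentSection, NULL, NULL, NULL, NULL);CHKERRQ(ierr);
  if (parentSection) {ierr = PetscSectionView(parentSection, viewer);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}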
1222
1223PETSC_UNUSED static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1224{
1225  PetscMPIInt    rank, size;
1226  MPI_Comm       comm;
1227  PetscErrorCode ierr;
1228
1229  PetscFunctionBegin;
1230  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1231  PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3);
1232
1233  /* Create point SF for parallel mesh */
1234  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
1235  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
1236  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1237  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
)0x44000001),1237,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1238 {
1239 const PetscInt *leaves;
1240 PetscSFNode *remotePoints, *rowners, *lowners;
1241 PetscInt numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1242 PetscInt pStart, pEnd;
1243
1244 ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1244,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1245 ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1245,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1246 ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners)PetscMallocA(2,PETSC_FALSE,1246,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,(size_t)(numRoots)*sizeof(**(&rowners)),(&rowners),(
size_t)(numLeaves)*sizeof(**(&lowners)),(&lowners))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1246,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1247 for (p=0; p<numRoots; p++) {
1248 rowners[p].rank = -1;
1249 rowners[p].index = -1;
1250 }
1251 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), rowners, lowners);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1251,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1252 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), rowners, lowners);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1252,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1253 for (p = 0; p < numLeaves; ++p) {
1254 if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1255 lowners[p].rank = rank;
1256 lowners[p].index = leaves ? leaves[p] : p;
1257 } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
1258 lowners[p].rank = -2;
1259 lowners[p].index = -2;
1260 }
1261 }
1262 for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag them so that MAXLOC does not use them */
1263 rowners[p].rank = -3;
1264 rowners[p].index = -3;
1265 }
1266 ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), lowners, rowners, MPI_MAXLOC(MPI_Op)(0x5800000c));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1266,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1267 ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), lowners, rowners, MPI_MAXLOC(MPI_Op)(0x5800000c));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1267,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1268 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), rowners, lowners);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1268,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1269 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), rowners, lowners);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1269,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1270 for (p = 0; p < numLeaves; ++p) {
1271 if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed")return PetscError(((MPI_Comm)0x44000001),1271,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,77,PETSC_ERROR_INITIAL,"Cell partition corrupt: point not claimed"
)
;
1272 if (lowners[p].rank != rank) ++numGhostPoints;
1273 }
1274 ierr = PetscMalloc1(numGhostPoints, &ghostPoints)PetscMallocA(1,PETSC_FALSE,1274,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,(size_t)(numGhostPoints)*sizeof(**(&ghostPoints)),(&
ghostPoints))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1274,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1275 ierr = PetscMalloc1(numGhostPoints, &remotePoints)PetscMallocA(1,PETSC_FALSE,1275,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,(size_t)(numGhostPoints)*sizeof(**(&remotePoints)),(&
remotePoints))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1275,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1276 for (p = 0, gp = 0; p < numLeaves; ++p) {
1277 if (lowners[p].rank != rank) {
1278 ghostPoints[gp] = leaves ? leaves[p] : p;
1279 remotePoints[gp].rank = lowners[p].rank;
1280 remotePoints[gp].index = lowners[p].index;
1281 ++gp;
1282 }
1283 }
1284 ierr = PetscFree2(rowners,lowners)PetscFreeA(2,1284,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,&(rowners),&(lowners))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1284,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1285 ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1285,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1286 ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1286,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1287 }
1288 {
1289 PetscBool useCone, useClosure, useAnchors;
1290
1291 ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1291,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1292 ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1292,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1293 ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1293,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1294 ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1294,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1295 }
1296 ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeSF].active) ? (*PetscLogPLE)((DMPLEX_DistributeSF
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1296,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1297 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1298}
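
The ownership vote above reduces (rank, index) pairs with MPI_MAXLOC over MPIU_2INT: the pair with the largest first entry wins, so leaves flagged -2 (already claimed) and roots flagged -3 always lose to a genuine bid. A self-contained sketch of that mechanism in plain MPI, with made-up bids (not part of the analyzed source):

#include <mpi.h>
#include <stdio.h>

int main(int argc, char **argv)
{
  struct {int bid; int payload;} mine, winner; /* layout matches MPI_2INT */
  int rank;

  MPI_Init(&argc, &argv);
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
  /* Even ranks bid their rank; odd ranks opt out with -2, like the
     "point already claimed" flag in the code above */
  mine.bid     = (rank % 2 == 0) ? rank : -2;
  mine.payload = rank; /* the datum the winner carries, here its own rank */
  MPI_Allreduce(&mine, &winner, 1, MPI_2INT, MPI_MAXLOC, MPI_COMM_WORLD);
  if (rank == 0) printf("winning bid %d carried payload %d\n", winner.bid, winner.payload);
  MPI_Finalize();
  return 0;
}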
1299
1300/*@
1301 DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1302
1303 Input Parameters:
1304+ dm - The DMPlex object
1305- flg - Balance the partition?
1306
1307 Level: intermediate
1308
1309.seealso: DMPlexDistribute(), DMPlexGetPartitionBalance()
1310@*/
1311PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1312{
1313 DM_Plex *mesh = (DM_Plex *)dm->data;
1314
1315 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1315; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1316 mesh->partitionBalance = flg;
1317 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1318}
1319
1320/*@
1321 DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1322
1323 Input Parameter:
1324. dm - The DMPlex object
1325
1326 Output Parameter:
1327. flg - Balance the partition?
1328
1329 Level: intermediate
1330
1331.seealso: DMPlexDistribute(), DMPlexSetPartitionBalance()
1332@*/
1333PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1334{
1335 DM_Plex *mesh = (DM_Plex *)dm->data;
1336
1337 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1337; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1338 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1338,__func__
,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1338,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1338,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1338,__func__
,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1339 PetscValidBoolPointer(flg, 2)do { if (!flg) return PetscError(((MPI_Comm)0x44000001),1339,
__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",2); if
(!PetscCheckPointer(flg,PETSC_BOOL)) return PetscError(((MPI_Comm
)0x44000001),1339,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscBool: Parameter # %d"
,2); } while (0)
;
1340 *flg = mesh->partitionBalance;
1341 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1342}
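
A short usage sketch for the pair of routines above, assuming an already-created DMPlex; the wrapper name is hypothetical:

static PetscErrorCode EnableBalancedDistribution(DM dm)
{
  PetscBool      balance;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexSetPartitionBalance(dm, PETSC_TRUE);CHKERRQ(ierr); /* request the balancing vote */
  ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);   /* read the flag back */
  if (!balance) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Partition balance flag was not set");
  PetscFunctionReturn(0);
}

Subsequent calls to DMPlexDistribute() on dm then use the shifted-rank vote in DMPlexCreatePointSF() below.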
1343
1344/*@C
1345 DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1346
1347 Input Parameters:
1348+ dm - The source DMPlex object
1349. migrationSF - The star forest that describes the parallel point remapping
1350- ownership - Flag causing a vote to determine point ownership
1351
1352 Output Parameter:
1353. pointSF - The star forest describing the point overlap in the remapped DM
1354
1355 Level: developer
1356
1357.seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1358@*/
1359PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1360{
1361 PetscMPIInt rank, size;
1362 PetscInt p, nroots, nleaves, idx, npointLeaves;
1363 PetscInt *pointLocal;
1364 const PetscInt *leaves;
1365 const PetscSFNode *roots;
1366 PetscSFNode *rootNodes, *leafNodes, *pointRemote;
1367 Vec shifts;
1368 const PetscInt numShifts = 13759;
1369 const PetscScalar *shift = NULL((void*)0);
1370 const PetscBool shiftDebug = PETSC_FALSE;
1371 PetscBool balance;
1372 PetscErrorCode ierr;
1373
1374 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1374; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1375 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1375,__func__
,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1375,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1375,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1375,__func__
,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1376 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1376,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1377 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1377,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1378 ierr = PetscLogEventBegin(DMPLEX_CreatePointSF,dm,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_CreatePointSF].active) ? (*PetscLogPLB)((DMPLEX_CreatePointSF
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1378,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1379
1380 ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1380,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1381 ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1381,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1382 ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes)PetscMallocA(2,PETSC_FALSE,1382,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,(size_t)(nroots)*sizeof(**(&rootNodes)),(&rootNodes)
,(size_t)(nleaves)*sizeof(**(&leafNodes)),(&leafNodes
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1382,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1383 if (ownership) {
1384 /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1385 if (balance) {
1386 PetscRandom r;
1387
1388 ierr = PetscRandomCreate(PETSC_COMM_SELF((MPI_Comm)0x44000001), &r);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1388,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1389 ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1389,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1390 ierr = VecCreate(PETSC_COMM_SELF((MPI_Comm)0x44000001), &shifts);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1390,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1391 ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1391,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1392 ierr = VecSetType(shifts, VECSTANDARD"standard");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1392,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1393 ierr = VecSetRandom(shifts, r);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1393,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1394 ierr = PetscRandomDestroy(&r);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1394,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1395 ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1395,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1396 }
1397
1398 /* Point ownership vote: Process with highest rank owns shared points */
1399 for (p = 0; p < nleaves; ++p) {
1400 if (shiftDebug) {
1401 ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts])(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts])(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1401,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1402 }
1403 /* Either put in a bid or we know we own it */
1404 leafNodes[p].rank = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts])(shift[roots[p].index%numShifts]) : 0))%size;
1405 leafNodes[p].index = p;
1406 }
1407 for (p = 0; p < nroots; p++) {
1408 /* Roots must not participate in the reduction; flag them so that MAXLOC does not use them */
1409 rootNodes[p].rank = -3;
1410 rootNodes[p].index = -3;
1411 }
1412 ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), leafNodes, rootNodes, MPI_MAXLOC(MPI_Op)(0x5800000c));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1412,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1413 ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), leafNodes, rootNodes, MPI_MAXLOC(MPI_Op)(0x5800000c));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1413,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1414 if (balance) {
1415 /* We've voted, now we need to get the rank. When we're balancing the partition, the "rank" in rootNodes is not
1416 * the rank but rather (rank + random)%size. So we do another reduction, voting the same way, but sending the
1417 * rank instead of the index. */
1418 PetscSFNode *rootRanks = NULL((void*)0);
1419 ierr = PetscMalloc1(nroots, &rootRanks)PetscMallocA(1,PETSC_FALSE,1419,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,(size_t)(nroots)*sizeof(**(&rootRanks)),(&rootRanks)
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1419,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1420 for (p = 0; p < nroots; p++) {
1421 rootRanks[p].rank = -3;
1422 rootRanks[p].index = -3;
1423 }
1424 for (p = 0; p < nleaves; p++) leafNodes[p].index = rank;
1425 ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), leafNodes, rootRanks, MPI_MAXLOC(MPI_Op)(0x5800000c));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1425,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1426 ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), leafNodes, rootRanks, MPI_MAXLOC(MPI_Op)(0x5800000c));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1426,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1427 for (p = 0; p < nroots; p++) rootNodes[p].rank = rootRanks[p].index;
1428 ierr = PetscFree(rootRanks)((*PetscTrFree)((void*)(rootRanks),1428,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
) || ((rootRanks) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1428,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1429 }
1430 } else {
1431 for (p = 0; p < nroots; p++) {
1432 rootNodes[p].index = -1;
1433 rootNodes[p].rank = rank;
1434 }
1435 for (p = 0; p < nleaves; p++) {
1436 /* Write new local id into old location */
1437 if (roots[p].rank == rank) {
1438 rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1439 }
1440 }
1441 }
1442 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), rootNodes, leafNodes);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1442,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1443 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), rootNodes, leafNodes);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1443,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1444
1445 for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1446 if (leafNodes[p].rank != rank) npointLeaves++;
1447 }
1448 ierr = PetscMalloc1(npointLeaves, &pointLocal)PetscMallocA(1,PETSC_FALSE,1448,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,(size_t)(npointLeaves)*sizeof(**(&pointLocal)),(&pointLocal
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1448,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1449 ierr = PetscMalloc1(npointLeaves, &pointRemote)PetscMallocA(1,PETSC_FALSE,1449,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,(size_t)(npointLeaves)*sizeof(**(&pointRemote)),(&pointRemote
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1449,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1450 for (idx = 0, p = 0; p < nleaves; p++) {
1451 if (leafNodes[p].rank != rank) {
1452 pointLocal[idx] = p;
1453 pointRemote[idx] = leafNodes[p];
1454 idx++;
1455 }
1456 }
1457 if (shift) {
1458 ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1458,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1459 ierr = VecDestroy(&shifts);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1459,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1460 }
1461 if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1461,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1462 ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1462,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1463 ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1463,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1464 ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1464,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1465 ierr = PetscFree2(rootNodes, leafNodes)PetscFreeA(2,1465,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,&(rootNodes),&(leafNodes))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1465,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1466 ierr = PetscLogEventEnd(DMPLEX_CreatePointSF,dm,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_CreatePointSF].active) ? (*PetscLogPLE)((DMPLEX_CreatePointSF
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1466,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1467 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1468}
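
The effect of the shift can be seen without any MPI at all. Voting with MAXLOC on the raw rank always awards a shared point to the highest rank; voting on (rank + shift) % size, with a per-point shift, moves the winner around, and the second reduction above recovers the winner's true rank. A standalone sketch in plain C (the shift value is arbitrary, not taken from the analyzed source):

#include <stdio.h>

int main(void)
{
  const int size = 4, shift = 2467 % 7; /* any per-point pseudo-random shift */
  int rank, bestKey = -1, winner = -1;

  for (rank = 0; rank < size; rank++) {
    int key = (rank + shift) % size;    /* the shifted bid, as in leafNodes[p].rank */
    if (key > bestKey) {bestKey = key; winner = rank;}
  }
  printf("shift %d: point awarded to rank %d, not always rank %d\n", shift, winner, size - 1);
  return 0;
}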
1469
1470/*@C
1471 DMPlexMigrate - Migrates internal DM data over the supplied star forest
1472
1473 Collective on dm
1474
1475 Input Parameters:
1476+ dm - The source DMPlex object
1477- sf - The star forest communication context describing the migration pattern
1478
1479 Output Parameter:
1480. targetDM - The target DMPlex object
1481
1482 Level: intermediate
1483
1484.seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1485@*/
1486PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1487{
1488 MPI_Comm comm;
1489 PetscInt dim, cdim, nroots;
1490 PetscSF sfPoint;
1491 ISLocalToGlobalMapping ltogMigration;
1492 ISLocalToGlobalMapping ltogOriginal = NULL((void*)0);
1493 PetscBool flg;
1494 PetscErrorCode ierr;
1495
1496 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1496; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1497 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1497,__func__
,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1497,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1497,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1497,__func__
,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1498 ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_Migrate].active) ? (*PetscLogPLB)((DMPLEX_Migrate),0,
(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1498,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1499 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1499,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1500 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1500,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1501 ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1501,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1502 ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1502,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1503 ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1503,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1504
1505 /* Check for a one-to-all distribution pattern */
1506 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1506,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1507 ierr = PetscSFGetGraph(sfPoint, &nroots, NULL((void*)0), NULL((void*)0), NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1507,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1508 if (nroots >= 0) {
1509 IS isOriginal;
1510 PetscInt n, size, nleaves;
1511 PetscInt *numbering_orig, *numbering_new;
1512
1513 /* Get the original point numbering */
1514 ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1514,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1515 ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1515,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1516 ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1516,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1517 ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1517,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1518 /* Convert to positive global numbers */
1519 for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
1520 /* Derive the new local-to-global mapping from the old one */
1521 ierr = PetscSFGetGraph(sf, NULL((void*)0), &nleaves, NULL((void*)0), NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1521,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1522 ierr = PetscMalloc1(nleaves, &numbering_new)PetscMallocA(1,PETSC_FALSE,1522,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,(size_t)(nleaves)*sizeof(**(&numbering_new)),(&numbering_new
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1522,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1523 ierr = PetscSFBcastBegin(sf, MPIU_INT((MPI_Datatype)0x4c000405), numbering_orig, numbering_new);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1523,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1524 ierr = PetscSFBcastEnd(sf, MPIU_INT((MPI_Datatype)0x4c000405), numbering_orig, numbering_new);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1524,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1525 ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1525,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1526 ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1526,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1527 ierr = ISDestroy(&isOriginal);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1527,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1528 } else {
1529 /* One-to-all distribution pattern: We can derive LToG from SF */
1530 ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1530,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1531 }
1532 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1532,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1533 if (flg) {
1534 ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1534,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1535 ierr = ISLocalToGlobalMappingView(ltogMigration, NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1535,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1536 }
1537 /* Migrate DM data to target DM */
1538 ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1538,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1539 ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1539,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1540 ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1540,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1541 ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1541,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1542 ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1542,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1543 ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1543,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1544 ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1544,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1545 ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_Migrate].active) ? (*PetscLogPLE)((DMPLEX_Migrate),0,
(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1545,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1546 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1547}
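
The numbering derivation in the branch above (source lines 1521-1524) is an instance of a general pattern: integers stored at the roots of a migration SF are broadcast to its leaves, so each migrated point carries its old global number along. A hedged sketch of that pattern with a hypothetical helper name:

static PetscErrorCode CarryNumbersThroughSF(PetscSF sf, const PetscInt *oldNumbers, PetscInt **newNumbers)
{
  PetscInt       nleaves;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSFGetGraph(sf, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves, newNumbers);CHKERRQ(ierr);
  /* roots hold the old distribution, leaves receive the migrated copy */
  ierr = PetscSFBcastBegin(sf, MPIU_INT, oldNumbers, *newNumbers);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf, MPIU_INT, oldNumbers, *newNumbers);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}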
1548
1549/*@C
1550 DMPlexDistribute - Distributes the mesh and any associated sections.
1551
1552 Collective on dm
1553
1554 Input Parameters:
1555+ dm - The original DMPlex object
1556- overlap - The overlap of partitions, 0 is the default
1557
1558 Output Parameters:
1559+ sf - The PetscSF used for point distribution, or NULL if not needed
1560- dmParallel - The distributed DMPlex object
1561
1562 Note: If the mesh was not distributed, the output dmParallel will be NULL.
1563
1564 The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1565 representation on the mesh.
1566
1567 Level: intermediate
1568
1569.seealso: DMPlexCreate(), DMSetAdjacency()
1570@*/
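
Before the implementation, a usage sketch of the canonical call sequence (distribute, swap in the parallel DM when one was produced, clean up); the wrapper name is hypothetical:

static PetscErrorCode DistributeInPlace(DM *dm)
{
  DM             dmDist = NULL;
  PetscSF        sfDist = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexDistribute(*dm, 0, &sfDist, &dmDist);CHKERRQ(ierr);
  if (dmDist) { /* NULL when the mesh stayed on a single process */
    ierr = DMDestroy(dm);CHKERRQ(ierr);
    *dm  = dmDist;
  }
  ierr = PetscSFDestroy(&sfDist);CHKERRQ(ierr); /* safe when no SF was returned */
  PetscFunctionReturn(0);
}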
1571PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1572{
1573 MPI_Comm comm;
1574 PetscPartitioner partitioner;
1575 IS cellPart;
1576 PetscSection cellPartSection;
1577 DM dmCoord;
1578 DMLabel lblPartition, lblMigration;
1579 PetscSF sfMigration, sfStratified, sfPoint;
1580 PetscBool flg, balance;
1581 PetscMPIInt rank, size;
1582 PetscErrorCode ierr;
1583
1584 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1584; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1585 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1585,__func__
,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1585,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1585,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1585,__func__
,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1586 PetscValidLogicalCollectiveInt(dm, overlap, 2)do { PetscErrorCode _7_ierr; PetscInt b1[2],b2[2]; b1[0] = -overlap
; b1[1] = overlap; _7_ierr = (PetscAllreduceBarrierCheck(PetscObjectComm
((PetscObject)dm),2,1586,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)dm))),0) || MPI_Allreduce((b1),(b2),(2),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000001)),(PetscObjectComm((PetscObject
)dm)))));do {if (__builtin_expect(!!(_7_ierr),0)) return PetscError
(((MPI_Comm)0x44000001),1586,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (-b2[0] != b2
[1]) return PetscError(PetscObjectComm((PetscObject)dm),1586,
__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Int value must be same on all processes, argument # %d"
,2); } while (0)
;
1587 if (sf) PetscValidPointer(sf,3)do { if (!sf) return PetscError(((MPI_Comm)0x44000001),1587,__func__
,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(sf,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),1587,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",3);
} while (0)
;
1: Assuming 'sf' is null
2: Taking false branch
1588 PetscValidPointer(dmParallel,4)do { if (!dmParallel) return PetscError(((MPI_Comm)0x44000001
),1588,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",4); if
(!PetscCheckPointer(dmParallel,PETSC_CHAR)) return PetscError
(((MPI_Comm)0x44000001),1588,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",4);
} while (0)
;
1589
1590 if (sf) *sf = NULL((void*)0);
3: Taking false branch
1591 *dmParallel = NULL((void*)0);
1592 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1592,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1593 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1593,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1594 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1594,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1595 if (size == 1) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4: Assuming 'size' is not equal to 1
5: Taking false branch
1596
1597 ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_Distribute].active) ? (*PetscLogPLB)((DMPLEX_Distribute
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1597,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1598 /* Create cell partition */
1599 ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_Partition].active) ? (*PetscLogPLB)((DMPLEX_Partition
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1599,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1600 ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1600,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1601 ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1601,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1602 ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1602,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1603 ierr = PetscLogEventBegin(DMPLEX_PartSelf,dm,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_PartSelf].active) ? (*PetscLogPLB)((DMPLEX_PartSelf),
0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1603,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1604 {
1605 /* Convert partition to DMLabel */
1606 IS is;
1607 PetscHSetI ht;
1608 const PetscInt *points;
1609 PetscInt *iranks;
1610 PetscInt pStart, pEnd, proc, npoints, poff = 0, nranks;
1611
1612 ierr = DMLabelCreate(PETSC_COMM_SELF((MPI_Comm)0x44000001), "Point Partition", &lblPartition);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1612,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1613 /* Preallocate strata */
1614 ierr = PetscHSetICreate(&ht);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1614,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
6: Calling 'PetscHSetICreate'
9: Returning from 'PetscHSetICreate'
1615 ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1615,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1616 for (proc = pStart; proc < pEnd; proc++) {
10: Loop condition is true. Entering loop body
13: Loop condition is true. Entering loop body
16: Loop condition is true. Entering loop body
19: Loop condition is true. Entering loop body
1617 ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1617,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1618 if (npoints) {ierr = PetscHSetIAdd(ht, proc);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1618,__func__,"/sandbox/petsc/petsc.next/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
11: Assuming 'npoints' is 0
12: Taking false branch
14: Assuming 'npoints' is 0
15: Taking false branch
17: Assuming 'npoints' is 0
18: Taking false branch
20: Assuming 'npoints' is not equal to 0
21: Taking true branch
22: Calling 'PetscHSetIAdd'
1619 }
1620 ierr = PetscHSetIGetSize(ht, &nranks);CHKERRQ(ierr);
1621 ierr = PetscMalloc1(nranks, &iranks);CHKERRQ(ierr);
1622 ierr = PetscHSetIGetElems(ht, &poff, iranks);CHKERRQ(ierr);
1623 ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr);
1624 ierr = DMLabelAddStrata(lblPartition, nranks, iranks);CHKERRQ(ierr);
1625 ierr = PetscFree(iranks);CHKERRQ(ierr);
1626 /* Inline DMPlexPartitionLabelClosure() */
1627 ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr);
1628 ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr);
1629 for (proc = pStart; proc < pEnd; proc++) {
1630 ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr);
1631 if (!npoints) continue;
1632 ierr = PetscSectionGetOffset(cellPartSection, proc, &poff);CHKERRQ(ierr);
1633 ierr = DMPlexClosurePoints_Private(dm, npoints, points+poff, &is);CHKERRQ(ierr);
1634 ierr = DMLabelSetStratumIS(lblPartition, proc, is);CHKERRQ(ierr);
1635 ierr = ISDestroy(&is);CHKERRQ(ierr);
1636 }
1637 ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr);
1638 }
1639 ierr = PetscLogEventEnd(DMPLEX_PartSelf,dm,0,0,0);CHKERRQ(ierr);
1640
1641 ierr = DMLabelCreate(PETSC_COMM_SELF, "Point migration", &lblMigration);CHKERRQ(ierr);
1642 ierr = DMPlexPartitionLabelInvert(dm, lblPartition, NULL, lblMigration);CHKERRQ(ierr);
1643 ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr);
1644 ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr);
1645 ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1646 sfMigration = sfStratified;
1647 ierr = PetscSFSetUp(sfMigration);CHKERRQ(ierr);
1648 ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1649 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
1650 if (flg) {
1651 ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1652 ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_(comm));CHKERRQ(ierr);
1653 }
1654
1655 /* Create non-overlapping parallel DM and migrate internal data */
1656 ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
1657 ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
1658 ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr);
1659
1660 /* Build the point SF without overlap */
1661 ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr);
1662 ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr);
1663 ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr);
1664 ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr);
1665 ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr);
1666 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1667 if (flg) {ierr = PetscSFView(sfPoint, NULL);CHKERRQ(ierr);}
1668
1669 if (overlap > 0) {
1670 DM dmOverlap;
1671 PetscInt nroots, nleaves, noldleaves, l;
1672 const PetscInt *oldLeaves;
1673 PetscSFNode *newRemote, *permRemote;
1674 const PetscSFNode *oldRemote;
1675 PetscSF sfOverlap, sfOverlapPoint;
1676
1677 /* Add the partition overlap to the distributed DM */
1678 ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
1679 ierr = DMDestroy(dmParallel);CHKERRQ(ierr);
1680 *dmParallel = dmOverlap;
1681 if (flg) {
1682 ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr);
1683 ierr = PetscSFView(sfOverlap, NULL);CHKERRQ(ierr);
1684 }
1685
1686 /* Re-map the migration SF to establish the full migration pattern */
1687 ierr = PetscSFGetGraph(sfMigration, &nroots, &noldleaves, &oldLeaves, &oldRemote);CHKERRQ(ierr);
1688 ierr = PetscSFGetGraph(sfOverlap, NULL, &nleaves, NULL, NULL);CHKERRQ(ierr);
1689 ierr = PetscMalloc1(nleaves, &newRemote);CHKERRQ(ierr);
1690 /* oldRemote: original root point mapping to original leaf point
1691 newRemote: original leaf point mapping to overlapped leaf point */
1692 if (oldLeaves) {
1693 /* After stratification, the migration remotes may not be in root (canonical) order, so we reorder using the leaf numbering */
1694 ierr = PetscMalloc1(noldleaves, &permRemote);CHKERRQ(ierr);
1695 for (l = 0; l < noldleaves; ++l) permRemote[oldLeaves[l]] = oldRemote[l];
1696 oldRemote = permRemote;
1697 }
1698 ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1699 ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT, oldRemote, newRemote);CHKERRQ(ierr);
1700 if (oldLeaves) {ierr = PetscFree(oldRemote);CHKERRQ(ierr);}
1701 ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr);
1702 ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL, PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr);
1703 ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1704 ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
1705 sfMigration = sfOverlapPoint;
1706 }
1707 /* Cleanup Partition */
1708 ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr);
1709 ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr);
1710 ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
1711 ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
1712 /* Copy BC */
1713 ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
1714 /* Create sfNatural */
1715 if (dm->useNatural) {
1716 PetscSection section;
1717
1718 ierr = DMGetSection(dm, &section);CHKERRQ(ierr);
1719 ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr);
1720 ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr);
1721 }
1722 /* Cleanup */
1723 if (sf) {*sf = sfMigration;}
1724 else {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);}
1725 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1726 ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
1727 PetscFunctionReturn(0);
1728}
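
For orientation, here is a minimal caller-side sketch of DMPlexDistribute() as exercised above. It is not part of the analyzed source: the wrapper name ExampleDistribute is hypothetical, an existing serial DMPlex is assumed in *dm, and only routines that appear in this file are used.

static PetscErrorCode ExampleDistribute(DM *dm)
{
  DM             dmDist      = NULL;  /* receives the parallel mesh   */
  PetscSF        sfMigration = NULL;  /* optional migration SF output */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* overlap = 0 requests a non-overlapping distribution */
  ierr = DMPlexDistribute(*dm, 0, &sfMigration, &dmDist);CHKERRQ(ierr);
  if (dmDist) {                       /* NULL when nothing was redistributed */
    ierr = DMDestroy(dm);CHKERRQ(ierr);
    *dm  = dmDist;
  }
  ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}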
1729
1730/*@C
1731 DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1732
1733 Collective on dm
1734
1735 Input Parameters:
1736+ dm - The non-overlapping distributed DMPlex object
1737- overlap - The overlap of partitions
1738
1739 Output Parameters:
1740+ sf - The PetscSF used for point distribution
1741- dmOverlap - The overlapping distributed DMPlex object, or NULL
1742
1743 Note: If the mesh was not distributed (a single-process communicator), dmOverlap is set to NULL and the routine returns immediately.
1744
1745 The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1746 representation on the mesh.
1747
1748 Level: intermediate
1749
1750.seealso: DMPlexCreate(), DMSetAdjacency()
1751@*/
1752PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1753{
1754 MPI_Comm comm;
1755 PetscMPIInt size, rank;
1756 PetscSection rootSection, leafSection;
1757 IS rootrank, leafrank;
1758 DM dmCoord;
1759 DMLabel lblOverlap;
1760 PetscSF sfOverlap, sfStratified, sfPoint;
1761 PetscErrorCode ierr;
1762
1763 PetscFunctionBegin;
1764 PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1765 if (sf) PetscValidPointer(sf, 3);
1766 PetscValidPointer(dmOverlap, 4);
1767
1768 if (sf) *sf = NULL;
1769 *dmOverlap = NULL;
1770 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
1771 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
1772 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
1773 if (size == 1) PetscFunctionReturn(0);
1774
1775 ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1776 /* Compute point overlap with neighbouring processes on the distributed DM */
1777 ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1778 ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr);
1779 ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr);
1780 ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
1781 ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr);
1782 /* Convert overlap label to stratified migration SF */
1783 ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr);
1784 ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr);
1785 ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
1786 sfOverlap = sfStratified;
1787 ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr);
1788 ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr);
1789
1790 ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
1791 ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
1792 ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
1793 ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
1794 ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
1795
1796 /* Build the overlapping DM */
1797 ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr);
1798 ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr);
1799 ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr);
1800 /* Build the new point SF */
1801 ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1802 ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr);
1803 ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr);
1804 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1805 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1806 /* Cleanup overlap partition */
1807 ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr);
1808 if (sf) *sf = sfOverlap;
1809 else {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);}
1810 ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0);CHKERRQ(ierr);
1811 PetscFunctionReturn(0);
1812}
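
A matching sketch for DMPlexDistributeOverlap(), again illustrative only: the name ExampleAddOverlap is hypothetical, and *dm is assumed to hold a distributed, non-overlapping DMPlex.

static PetscErrorCode ExampleAddOverlap(DM *dm)
{
  DM             dmOverlap = NULL;
  PetscSF        sfOverlap = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* request one layer of ghost cells around each partition */
  ierr = DMPlexDistributeOverlap(*dm, 1, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
  if (dmOverlap) {                    /* NULL on a single-process communicator */
    ierr = DMDestroy(dm);CHKERRQ(ierr);
    *dm  = dmOverlap;
  }
  ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}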
1813
1814/*@C
1815 DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1816 root process of the original's communicator.
1817
1818 Collective on dm
1819
1820 Input Parameter:
1821. dm - the original DMPlex object
1822
1823 Output Parameters:
1824+ sf - the PetscSF used for point distribution (optional)
1825- gatherMesh - the gathered DM object, or NULL
1826
1827 Level: intermediate
1828
1829.seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1830@*/
1831PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
1832{
1833 MPI_Comm comm;
1834 PetscMPIInt size;
1835 PetscPartitioner oldPart, gatherPart;
1836 PetscErrorCode ierr;
1837
1838 PetscFunctionBegin;
1839 PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1840 PetscValidPointer(gatherMesh,2);
1841 *gatherMesh = NULL;
1842 if (sf) *sf = NULL;
1843 comm = PetscObjectComm((PetscObject)dm);
1844 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1845 if (size == 1) PetscFunctionReturn(0);
1846 ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr);
1847 ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr);
1848 ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr);
1849 ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER);CHKERRQ(ierr);
1850 ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr);
1851 ierr = DMPlexDistribute(dm,0,sf,gatherMesh);CHKERRQ(ierr);
1852
1853 ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr);
1854 ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr);
1855 ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr);
1856 PetscFunctionReturn(0);
1857}
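
An illustrative usage sketch for DMPlexGetGatherDM() (the helper name ExampleGather is hypothetical; this pattern is useful e.g. for serial post-processing of a distributed mesh):

static PetscErrorCode ExampleGather(DM dm)
{
  DM             gatherMesh = NULL;
  PetscSF        gatherSF   = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetGatherDM(dm, &gatherSF, &gatherMesh);CHKERRQ(ierr);
  if (gatherMesh) {
    /* rank 0 of dm's communicator now holds every point of the mesh;
       ... use gatherMesh for serial I/O or post-processing here ... */
    ierr = DMDestroy(&gatherMesh);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}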
1858
1859/*@C
1860 DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1861
1862 Collective on dm
1863
1864 Input Parameter:
1865. dm - the original DMPlex object
1866
1867 Output Parameters:
1868+ sf - the PetscSF used for point distribution (optional)
1869- redundantMesh - the redundant DM object, or NULL
1870
1871 Level: intermediate
1872
1873.seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1874@*/
1875PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
1876{
1877 MPI_Comm comm;
1878 PetscMPIInt size, rank;
1879 PetscInt pStart, pEnd, p;
1880 PetscInt numPoints = -1;
1881 PetscSF migrationSF, sfPoint, gatherSF;
1882 DM gatherDM, dmCoord;
1883 PetscSFNode *points;
1884 PetscErrorCode ierr;
1885
1886 PetscFunctionBegin;
1887 PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1888 PetscValidPointer(redundantMesh,2);
1889 *redundantMesh = NULL;
1890 comm = PetscObjectComm((PetscObject)dm);
1891 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1892 if (size == 1) {
1893 ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr);
1894 *redundantMesh = dm;
1895 if (sf) *sf = NULL;
1896 PetscFunctionReturn(0);
1897 }
1898 ierr = DMPlexGetGatherDM(dm,&gatherSF,&gatherDM);CHKERRQ(ierr);
1899 if (!gatherDM) PetscFunctionReturn(0);
1900 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1901 ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr);
1902 numPoints = pEnd - pStart;
1903 ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm);CHKERRQ(ierr);
1904 ierr = PetscMalloc1(numPoints,&points);CHKERRQ(ierr);
1905 ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr);
1906 for (p = 0; p < numPoints; p++) {
1907 points[p].index = p;
1908 points[p].rank = 0;
1909 }
1910 ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL,PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr);
1911 ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr);
1912 ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr);
1913 ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr);
1914 ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr);
1915 ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr);
1916 ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr);
1917 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr);}
1918 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr);
1919 if (sf) {
1920 PetscSF tsf;
1921
1922 ierr = PetscSFCompose(gatherSF,migrationSF,&tsf);CHKERRQ(ierr);
1923 ierr = DMPlexStratifyMigrationSF(dm, tsf, sf);CHKERRQ(ierr);
1924 ierr = PetscSFDestroy(&tsf);CHKERRQ(ierr);
1925 }
1926 ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr);
1927 ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr);
1928 ierr = DMDestroy(&gatherDM);CHKERRQ(ierr);
1929 PetscFunctionReturn(0);
1930}
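
And a corresponding sketch for DMPlexGetRedundantDM() (the helper name ExampleMakeRedundant is hypothetical; every rank receives a complete copy of the mesh):

static PetscErrorCode ExampleMakeRedundant(DM dm)
{
  DM             redundantMesh = NULL;
  PetscSF        sf            = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetRedundantDM(dm, &sf, &redundantMesh);CHKERRQ(ierr);
  if (redundantMesh) {
    /* every process can now traverse the full mesh locally */
    ierr = DMDestroy(&redundantMesh);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}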

/sandbox/petsc/petsc.next/include/petsc/private/hashseti.h

1#if !defined(PETSC_HASHSETI_H)
2#define PETSC_HASHSETI_H
3
4#include <petsc/private/hashset.h>
5
6PETSC_HASH_SET(HSetI, PetscInt, PetscHashInt, PetscHashEqual)typedef struct kh_HSetI_s { khint_t n_buckets, size, n_occupied
, upper_bound; khint32_t *flags; PetscInt *keys; char *vals; }
kh_HSetI_t; static inline __attribute((unused)) kh_HSetI_t *
kh_init_HSetI(void) { return (kh_HSetI_t*)calloc(1,sizeof(kh_HSetI_t
)); } static inline __attribute((unused)) void kh_destroy_HSetI
(kh_HSetI_t *h) { if (h) { free((void *)h->keys); free(h->
flags); free((void *)h->vals); free(h); } } static inline __attribute
((unused)) void kh_clear_HSetI(kh_HSetI_t *h) { if (h &&
h->flags) { memset(h->flags, 0xaa, ((h->n_buckets) <
16? 1 : (h->n_buckets)>>4) * sizeof(khint32_t)); h->
size = h->n_occupied = 0; } } static inline __attribute((unused
)) khint_t kh_get_HSetI(const kh_HSetI_t *h, PetscInt key) { if
(h->n_buckets) { khint_t k, i, last, mask, step = 0; mask
= h->n_buckets - 1; k = PetscHashInt(key); i = k & mask
; last = i; while (!((h->flags[i>>4]>>((i&
0xfU)<<1))&2) && (((h->flags[i>>4]
>>((i&0xfU)<<1))&1) || !((h->keys[i]) ==
(key)))) { i = (i + (++step)) & mask; if (i == last) return
h->n_buckets; } return ((h->flags[i>>4]>>(
(i&0xfU)<<1))&3)? h->n_buckets : i; } else return
0; } static inline __attribute((unused)) int kh_resize_HSetI
(kh_HSetI_t *h, khint_t new_n_buckets) { khint32_t *new_flags
= 0; khint_t j = 1; { (--(new_n_buckets), (new_n_buckets)|=(
new_n_buckets)>>1, (new_n_buckets)|=(new_n_buckets)>>
2, (new_n_buckets)|=(new_n_buckets)>>4, (new_n_buckets)
|=(new_n_buckets)>>8, (new_n_buckets)|=(new_n_buckets)>>
16, ++(new_n_buckets)); if (new_n_buckets < 4) new_n_buckets
= 4; if (h->size >= (khint_t)(new_n_buckets * __ac_HASH_UPPER
+ 0.5)) j = 0; else { new_flags = (khint32_t*)malloc(((new_n_buckets
) < 16? 1 : (new_n_buckets)>>4) * sizeof(khint32_t))
; if (!new_flags) return -1; memset(new_flags, 0xaa, ((new_n_buckets
) < 16? 1 : (new_n_buckets)>>4) * sizeof(khint32_t))
; if (h->n_buckets < new_n_buckets) { PetscInt *new_keys
= (PetscInt*)realloc((void *)h->keys,new_n_buckets * sizeof
(PetscInt)); if (!new_keys) { free(new_flags); return -1; } h
->keys = new_keys; if (0) { char *new_vals = (char*)realloc
((void *)h->vals,new_n_buckets * sizeof(char)); if (!new_vals
) { free(new_flags); return -1; } h->vals = new_vals; } } }
} if (j) { for (j = 0; j != h->n_buckets; ++j) { if (((h->
flags[j>>4]>>((j&0xfU)<<1))&3) == 0
) { PetscInt key = h->keys[j]; char val; khint_t new_mask;
new_mask = new_n_buckets - 1; if (0) val = h->vals[j]; (h
->flags[j>>4]|=1ul<<((j&0xfU)<<1)); while
(1) { khint_t k, i, step = 0; k = PetscHashInt(key); i = k &
new_mask; while (!((new_flags[i>>4]>>((i&0xfU
)<<1))&2)) i = (i + (++step)) & new_mask; (new_flags
[i>>4]&=~(2ul<<((i&0xfU)<<1))); if (
i < h->n_buckets && ((h->flags[i>>4]>>
((i&0xfU)<<1))&3) == 0) { { PetscInt tmp = h->
keys[i]; h->keys[i] = key; key = tmp; } if (0) { char tmp =
h->vals[i]; h->vals[i] = val; val = tmp; } (h->flags
[i>>4]|=1ul<<((i&0xfU)<<1)); } else { h
->keys[i] = key; if (0) h->vals[i] = val; break; } } } }
if (h->n_buckets > new_n_buckets) { h->keys = (PetscInt
*)realloc((void *)h->keys,new_n_buckets * sizeof(PetscInt)
); if (0) h->vals = (char*)realloc((void *)h->vals,new_n_buckets
* sizeof(char)); } free(h->flags); h->flags = new_flags
; h->n_buckets = new_n_buckets; h->n_occupied = h->size
; h->upper_bound = (khint_t)(h->n_buckets * __ac_HASH_UPPER
+ 0.5); } return 0; } static inline __attribute((unused)) khint_t
kh_put_HSetI(kh_HSetI_t *h, PetscInt key, int *ret) { khint_t
x; if (h->n_occupied >= h->upper_bound) { if (h->
n_buckets > (h->size<<1)) { if (kh_resize_HSetI(h
, h->n_buckets - 1) < 0) { *ret = -1; return h->n_buckets
; } } else if (kh_resize_HSetI(h, h->n_buckets + 1) < 0
) { *ret = -1; return h->n_buckets; } } { khint_t k, i, site
, last, mask = h->n_buckets - 1, step = 0; x = site = h->
n_buckets; k = PetscHashInt(key); i = k & mask; if (((h->
flags[i>>4]>>((i&0xfU)<<1))&2)) x =
i; else { last = i; while (!((h->flags[i>>4]>>
((i&0xfU)<<1))&2) && (((h->flags[i>>
4]>>((i&0xfU)<<1))&1) || !((h->keys[i]
) == (key)))) { if (((h->flags[i>>4]>>((i&
0xfU)<<1))&1)) site = i; i = (i + (++step)) & mask
; if (i == last) { x = site; break; } } if (x == h->n_buckets
) { if (((h->flags[i>>4]>>((i&0xfU)<<
1))&2) && site != h->n_buckets) x = site; else
x = i; } } } if (((h->flags[x>>4]>>((x&0xfU
)<<1))&2)) { h->keys[x] = key; (h->flags[x>>
4]&=~(3ul<<((x&0xfU)<<1))); ++h->size;
++h->n_occupied; *ret = 1; } else if (((h->flags[x>>
4]>>((x&0xfU)<<1))&1)) { h->keys[x] = key
; (h->flags[x>>4]&=~(3ul<<((x&0xfU)<<
1))); ++h->size; *ret = 2; } else *ret = 0; return x; } static
inline __attribute((unused)) void kh_del_HSetI(kh_HSetI_t *h
, khint_t x) { if (x != h->n_buckets && !((h->flags
[x>>4]>>((x&0xfU)<<1))&3)) { (h->
flags[x>>4]|=1ul<<((x&0xfU)<<1)); --h->
size; } } typedef kh_HSetI_t *PetscHSetI; static inline __attribute
static inline __attribute((unused)) PetscErrorCode PetscHSetICreate(PetscHSetI *ht)
{
  /* PetscFunctionBegin/PetscFunctionReturn expand to the petscstack push/pop
     bookkeeping and PetscValidPointer to the Null/Invalid Pointer PetscError
     checks, exactly as expanded verbatim elsewhere in this report; they are
     left unexpanded here and in the wrappers below for readability. */
  PetscFunctionBegin;
  PetscValidPointer(ht,1);
  *ht = kh_init_HSetI();
  PetscFunctionReturn(0);
}

static inline __attribute((unused)) PetscErrorCode PetscHSetIDestroy(PetscHSetI *ht)
{
  PetscFunctionBegin;
  PetscValidPointer(ht,1);
  if (!*ht) PetscFunctionReturn(0); /* destroying a NULL handle is a no-op */
  kh_destroy_HSetI(*ht);
  *ht = ((void*)0);
  PetscFunctionReturn(0);
}

static inline __attribute((unused)) PetscErrorCode PetscHSetIReset(PetscHSetI ht)
{
  PetscFunctionBegin;
  PetscValidPointer(ht,1);
  { if (ht) { free((ht)->keys); free((ht)->flags); free((ht)->vals); memset((ht), 0x00, sizeof(*(ht))); } } /* kh_reset(HSetI, ht) */
  PetscFunctionReturn(0);
}
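A usage note on the two teardown paths just generated: PetscHSetIReset frees the arrays and zeroes the struct, so the same handle can be refilled, while PetscHSetIDestroy also disposes of the struct and nulls the caller's pointer. A minimal sketch, assuming only the private-header API shown in this expansion (PetscHSetIAdd appears further down; error handling simplified):

#include <petsc/private/hashseti.h>

static PetscErrorCode reuse_set(void)
{
  PetscHSetI     set;
  PetscErrorCode ierr;

  ierr = PetscHSetICreate(&set); if (ierr) return ierr;
  ierr = PetscHSetIAdd(set,42);  if (ierr) return ierr;
  ierr = PetscHSetIReset(set);   if (ierr) return ierr; /* empty, but 'set' is still usable */
  ierr = PetscHSetIAdd(set,7);   if (ierr) return ierr; /* triggers a fresh allocation */
  ierr = PetscHSetIDestroy(&set);if (ierr) return ierr; /* frees everything; set == NULL */
  return 0;
}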
static inline __attribute((unused)) PetscErrorCode PetscHSetIDuplicate(PetscHSetI ht, PetscHSetI *hd)
{
  int      ret;
  PetscInt key;
  PetscFunctionBegin;
  PetscValidPointer(ht,1);
  PetscValidPointer(hd,2);
  *hd = kh_init_HSetI();
  ret = kh_resize_HSetI(*hd, ((ht)->size)); /* size the copy once up front */
  PetscHashAssert(ret == 0);
  kh_foreach_key(ht, key, { kh_put_HSetI(*hd, key, &ret); PetscHashAssert(ret >= 0); });
  PetscFunctionReturn(0);
}

static inline __attribute((unused)) PetscErrorCode PetscHSetIUpdate(PetscHSetI ht, PetscHSetI hta)
{
  int      ret;
  PetscInt key;
  PetscFunctionBegin;
  PetscValidPointer(ht,1);
  PetscValidPointer(hta,2);
  kh_foreach_key(hta, key, { kh_put_HSetI(ht, key, &ret); PetscHashAssert(ret >= 0); }); /* in-place union */
  PetscFunctionReturn(0);
}

static inline __attribute((unused)) PetscErrorCode PetscHSetIClear(PetscHSetI ht)
{
  PetscFunctionBegin;
  PetscValidPointer(ht,1);
  kh_clear_HSetI(ht); /* marks every bucket empty; keeps the allocations */
  PetscFunctionReturn(0);
}
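PetscHSetIUpdate above is an in-place set union (every key of hta is put into ht), and PetscHSetIDuplicate presizes the copy before re-inserting the live keys. A sketch combining the two into a non-destructive union (the function name is hypothetical):

static PetscErrorCode set_union(PetscHSetI a, PetscHSetI b, PetscHSetI *out)
{
  PetscErrorCode ierr;

  ierr = PetscHSetIDuplicate(a, out);if (ierr) return ierr; /* *out starts as a copy of a */
  ierr = PetscHSetIUpdate(*out, b);  if (ierr) return ierr; /* fold in the keys of b */
  return 0;
}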
static inline __attribute((unused)) PetscErrorCode PetscHSetIResize(PetscHSetI ht, PetscInt nb)
{
  int ret;
  PetscFunctionBegin;
  PetscValidPointer(ht,1);
  ret = kh_resize_HSetI(ht, (khint_t)nb);
  PetscHashAssert(ret == 0);
  PetscFunctionReturn(0);
}

static inline __attribute((unused)) PetscErrorCode PetscHSetIGetSize(PetscHSetI ht, PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidPointer(ht,1);
  PetscValidIntPointer(n,2);
  *n = (PetscInt)((ht)->size); /* number of live keys */
  PetscFunctionReturn(0);
}

static inline __attribute((unused)) PetscErrorCode PetscHSetIGetCapacity(PetscHSetI ht, PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidPointer(ht,1);
  PetscValidIntPointer(n,2);
  *n = (PetscInt)((ht)->n_buckets); /* allocated buckets, a power of two */
  PetscFunctionReturn(0);
}
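Because kh_put_HSetI resizes whenever n_occupied reaches upper_bound (__ac_HASH_UPPER, 0.77 in stock khash, times n_buckets), calling PetscHSetIResize once before a bulk insertion avoids most of the intermediate rehashes. A sketch (the helper is illustrative):

static PetscErrorCode bulk_fill(PetscHSetI set, const PetscInt keys[], PetscInt n)
{
  PetscErrorCode ierr;
  PetscInt       i;

  ierr = PetscHSetIResize(set, n);if (ierr) return ierr; /* grow once instead of repeatedly */
  for (i = 0; i < n; ++i) {
    ierr = PetscHSetIAdd(set, keys[i]);if (ierr) return ierr;
  }
  return 0;
}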
static inline __attribute((unused)) PetscErrorCode PetscHSetIHas(PetscHSetI ht, PetscInt key, PetscBool *has)
{
  khiter_t iter;
  PetscFunctionBegin;
  PetscValidPointer(ht,1);
  PetscValidPointer(has,3);
  iter = kh_get_HSetI(ht, key);
  *has = (iter != ((ht)->n_buckets)) ? PETSC_TRUE : PETSC_FALSE; /* kh_end(ht) == n_buckets means "not found" */
  PetscFunctionReturn(0);
}

static inline __attribute((unused)) PetscErrorCode PetscHSetIAdd(PetscHSetI ht, PetscInt key)
{
  int      ret;
  khiter_t iter;
  PetscFunctionBegin;
  PetscValidPointer(ht,1);
  iter = kh_put_HSetI(ht, key, &ret); (void)iter;
  PetscHashAssert(ret >= 0);
  PetscFunctionReturn(0);
}

static inline __attribute((unused)) PetscErrorCode PetscHSetIDel(PetscHSetI ht, PetscInt key)
{
  khiter_t iter;
  PetscFunctionBegin;
  PetscValidPointer(ht,1);
  iter = kh_get_HSetI(ht, key);
  kh_del_HSetI(ht, iter); /* kh_del is a no-op when iter == kh_end(ht) */
  PetscFunctionReturn(0);
}
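A round trip through the three calls just generated; note that PetscHSetIDel only sets the "deleted" bit, so the bucket is reclaimed lazily by a later resize rather than immediately. Sketch:

static PetscErrorCode membership_demo(PetscHSetI set)
{
  PetscBool      has;
  PetscErrorCode ierr;

  ierr = PetscHSetIAdd(set, 17);      if (ierr) return ierr;
  ierr = PetscHSetIHas(set, 17, &has);if (ierr) return ierr; /* has == PETSC_TRUE */
  ierr = PetscHSetIDel(set, 17);      if (ierr) return ierr; /* leaves a tombstone */
  ierr = PetscHSetIHas(set, 17, &has);if (ierr) return ierr; /* has == PETSC_FALSE */
  return 0;
}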
static inline __attribute((unused)) PetscErrorCode PetscHSetIQueryAdd(PetscHSetI ht, PetscInt key, PetscBool *missing)
{
  int      ret;
  khiter_t iter;
  PetscFunctionBegin;
  PetscValidPointer(ht,1);
  PetscValidPointer(missing,3);
  iter = kh_put_HSetI(ht, key, &ret); (void)iter;
  PetscHashAssert(ret >= 0);
  *missing = ret ? PETSC_TRUE : PETSC_FALSE; /* ret == 0 means the key was already present */
  PetscFunctionReturn(0);
}

static inline __attribute((unused)) PetscErrorCode PetscHSetIQueryDel(PetscHSetI ht, PetscInt key, PetscBool *present)
{
  khiter_t iter;
  PetscFunctionBegin;
  PetscValidPointer(ht,1);
  PetscValidPointer(present,3);
  iter = kh_get_HSetI(ht, key);
  if (iter != ((ht)->n_buckets)) {
    kh_del_HSetI(ht, iter);
    *present = PETSC_TRUE;
  } else *present = PETSC_FALSE;
  PetscFunctionReturn(0);
}

static inline __attribute((unused)) PetscErrorCode PetscHSetIGetElems(PetscHSetI ht, PetscInt *off, PetscInt array[])
{
  PetscInt key;
  PetscInt pos;
  PetscFunctionBegin;
  PetscValidPointer(ht,1);
  PetscValidIntPointer(off,2);
  pos = *off;
  kh_foreach_key(ht, key, array[pos++] = key); /* append the live keys starting at *off */
  *off = pos;
  PetscFunctionReturn(0);
}
 7. Within the expansion of the macro 'PETSC_HASH_SET':
    a. Assuming 'petscstack' is null
    b. Assuming the condition is false
    c. Calling 'kh_init_HSetI'
    d. Returning from 'kh_init_HSetI'
    e. Assuming 'petscstack' is null
 8. Within the expansion of the macro 'PETSC_HASH_SET':
    a. Null pointer value stored to field 'flags'
23. Within the expansion of the macro 'PETSC_HASH_SET':
    a. Assuming 'petscstack' is null
    b. Assuming 'ht' is non-null
    c. Assuming the condition is false
    d. Calling 'kh_put_HSetI'
24. Within the expansion of the macro 'PETSC_HASH_SET':
    a. Calling 'kh_resize_HSetI'
    b. Returning from 'kh_resize_HSetI'
    c. Calling 'PetscHashInt'
    d. Returning from 'PetscHashInt'
    e. Array access (via field 'flags') results in a null pointer dereference

Reading the path: kh_init_HSetI returns a calloc'ed table, so 'flags' starts out null (event 8). On the first insertion a wrapper calls kh_put_HSetI (event 23), which calls kh_resize_HSetI; the analyzer appears to assume the branch where 'h->size >= (khint_t)(new_n_buckets * __ac_HASH_UPPER + 0.5)' holds, in which case no flag array is allocated ('j = 0'), so the probe 'h->flags[i>>4]' back in kh_put_HSetI (event 24e) reads through the still-null 'flags'.

25. Within the expansion of the macro 'PETSC_HASH_SET':
7
8#endif /* PETSC_HASHSETI_H */
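Taken together, the PETSC_HASH_SET expansion above generates the entire PetscHSetI API. Since PetscHSetIGetElems appends the live keys at *off in unspecified bucket order, the usual extraction pattern is: query the size, allocate, collect. A minimal sketch using plain malloc/printf for brevity (PETSc code proper would use PetscMalloc1/PetscPrintf):

#include <stdio.h>
#include <stdlib.h>
#include <petsc/private/hashseti.h>

static PetscErrorCode dump_set(PetscHSetI set)
{
  PetscInt       n, off = 0, i, *elems;
  PetscErrorCode ierr;

  ierr = PetscHSetIGetSize(set, &n);if (ierr) return ierr;
  elems = (PetscInt *)malloc((size_t)n * sizeof(PetscInt));
  if (n && !elems) return PETSC_ERR_MEM;
  ierr = PetscHSetIGetElems(set, &off, elems);if (ierr) return ierr; /* off == n afterwards */
  for (i = 0; i < n; ++i) printf("elem[%d] = %d\n", (int)i, (int)elems[i]);
  free(elems);
  return 0;
}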

/sandbox/petsc/petsc.next/include/petsc/private/hashtable.h

1#if !defined(PETSC_HASHTABLE_H)
2#define PETSC_HASHTABLE_H
3
4#include <petsc/private/petscimpl.h>
5
6#define kh_inline PETSC_INLINE
7#define klib_unused PETSC_UNUSED
8#include <petsc/private/kernels/khash.h>
9
10/* Required for khash <= 0.2.5 */
11#if !defined(kcalloc)
12#define kcalloc(N,Z) calloc(N,Z)
13#endif
14#if !defined(kmalloc)
15#define kmalloc(Z) malloc(Z)
16#endif
17#if !defined(krealloc)
18#define krealloc(P,Z) realloc(P,Z)
19#endif
20#if !defined(kfree)
21#define kfree(P) free(P)
22#endif
23
24/* --- Useful extensions to khash --- */
25
26#if !defined(kh_reset)
27/*! @function
28 @abstract Reset a hash table to initial state.
29 @param name Name of the hash table [symbol]
30 @param h Pointer to the hash table [khash_t(name)*]
31 */
32#define kh_reset(name, h) { \
33  if (h) { \
34    kfree((h)->keys); kfree((h)->flags); \
35    kfree((h)->vals); \
36    memset((h), 0x00, sizeof(*(h))); \
37  } }
38#endif /*kh_reset*/
39
40#if !defined(kh_foreach)
41/*! @function
42 @abstract Iterate over the entries in the hash table
43 @param h Pointer to the hash table [khash_t(name)*]
44 @param kvar Variable to which key will be assigned
45 @param vvar Variable to which value will be assigned
46 @param code Block of code to execute
47 */
48#define kh_foreach(h, kvar, vvar, code) { khint_t __i; \
49  for (__i = kh_begin(h); __i != kh_end(h); ++__i) { \
50    if (!kh_exist(h,__i)) continue; \
51    (kvar) = kh_key(h,__i); \
52    (vvar) = kh_val(h,__i); \
53    code; \
54  } }
55#endif /*kh_foreach*/
56
57#if !defined(kh_foreach_key)
58/*! @function
59 @abstract Iterate over the keys in the hash table
60 @param h Pointer to the hash table [khash_t(name)*]
61 @param kvar Variable to which key will be assigned
62 @param code Block of code to execute
63 */
64#define kh_foreach_key(h, kvar, code) { khint_t __i; \
65  for (__i = kh_begin(h); __i != kh_end(h); ++__i) { \
66    if (!kh_exist(h,__i)) continue; \
67    (kvar) = kh_key(h,__i); \
68    code; \
69  } }
70#endif /*kh_foreach_key*/
71
72#if !defined(kh_foreach_value)
73/*! @function
74 @abstract Iterate over the values in the hash table
75 @param h Pointer to the hash table [khash_t(name)*]
76 @param vvar Variable to which value will be assigned
77 @param code Block of code to execute
78 */
79#define kh_foreach_value(h, vvar, code) { khint_t __i; \
80  for (__i = kh_begin(h); __i != kh_end(h); ++__i) { \
81    if (!kh_exist(h,__i)) continue; \
82    (vvar) = kh_val(h,__i); \
83    code; \
84  } }
85#endif /*kh_foreach_value*/
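All three kh_foreach variants expand to the same guarded scan over n_buckets slots, skipping any slot whose 2-bit state is nonzero; only the loop body differs. For a key-only set such as the PetscHSetI generated earlier, kh_foreach_key is the applicable one. A sketch (the function is illustrative):

static PetscInt sum_keys(PetscHSetI ht)
{
  PetscInt key, sum = 0;
  kh_foreach_key(ht, key, sum += key); /* 'key' receives each live key in turn */
  return sum;
}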
86
87
88/* --- Helper macro for error checking --- */
89
90#if defined(PETSC_USE_DEBUG)
91#define PetscHashAssert(expr) do { \
92  if (PetscUnlikely(!(expr))) \
93    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB, \
94             "[khash] Assertion: `%s' failed.", \
95             PetscStringize(expr)); \
96} while(0)
97#else
98#define PetscHashAssert(expr) ((void)(expr))
99#endif
100
101
102/* --- Low level iterator API --- */
103
104typedef khiter_t PetscHashIter;
105
106#define PetscHashIterBegin(ht,i) do { \
107  (i) = kh_begin((ht)); \
108  if ((i) != kh_end((ht)) && !kh_exist((ht),(i))) \
109    PetscHashIterNext((ht),(i)); \
110} while (0)
111
112#define PetscHashIterNext(ht,i) \
113  do { ++(i); } while ((i) != kh_end((ht)) && !kh_exist((ht),(i)))
114
115#define PetscHashIterAtEnd(ht,i) ((i) == kh_end((ht)))
116
117#define PetscHashIterGetKey(ht,i,k) ((k) = kh_key((ht),(i)))
118
119#define PetscHashIterGetVal(ht,i,v) ((v) = kh_val((ht),(i)))
120
121#define PetscHashIterSetVal(ht,i,v) (kh_val((ht),(i)) = (v))
122
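The iterator macros drive the same flags scan incrementally: Begin seeks the first live bucket, Next the following one, and AtEnd compares against kh_end. A sketch counting the entries of a set (for a set, PetscHashIterGetVal/SetVal would not apply since vals is unused):

static PetscInt count_entries(PetscHSetI ht)
{
  PetscHashIter iter;
  PetscInt      key, n = 0;

  PetscHashIterBegin(ht, iter);
  while (!PetscHashIterAtEnd(ht, iter)) {
    PetscHashIterGetKey(ht, iter, key);
    (void)key; /* real code would consume the key here */
    ++n;
    PetscHashIterNext(ht, iter);
  }
  return n;
}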
123
124/* --- Thomas Wang integer hash functions --- */
125
126typedef khint32_t PetscHash32_t;
127typedef khint64_t PetscHash64_t;
128typedef khint_t PetscHash_t;
129
130/* Thomas Wang's first version for 32bit integers */
131PETSC_STATIC_INLINE PetscHash_t PetscHash_UInt32_v0(PetscHash32_t key)
132{
133 key += ~(key << 15);
134 key ^= (key >> 10);
135 key += (key << 3);
136 key ^= (key >> 6);
137 key += ~(key << 11);
138 key ^= (key >> 16);
139 return key;
140}
141
142/* Thomas Wang's second version for 32bit integers */
143PETSC_STATIC_INLINE PetscHash_t PetscHash_UInt32_v1(PetscHash32_t key)
144{
145 key = ~key + (key << 15); /* key = (key << 15) - key - 1; */
146 key = key ^ (key >> 12);
147 key = key + (key << 2);
148 key = key ^ (key >> 4);
149 key = key * 2057; /* key = (key + (key << 3)) + (key << 11); */
150 key = key ^ (key >> 16);
151 return key;
152}
153
154PETSC_STATIC_INLINE PetscHash_t PetscHash_UInt32(PetscHash32_t key)
155{
156 return PetscHash_UInt32_v1(key);
 27. Calling 'PetscHash_UInt32_v1'
 28. Returning from 'PetscHash_UInt32_v1'
157}
158
159/* Thomas Wang's version for 64bit integer -> 32bit hash */
160PETSC_STATIC_INLINE PetscHash32_t PetscHash_UInt64_32(PetscHash64_t key)
161{
162 key = ~key + (key << 18); /* key = (key << 18) - key - 1; */
163 key = key ^ (key >> 31);
164 key = key * 21; /* key = (key + (key << 2)) + (key << 4); */
165 key = key ^ (key >> 11);
166 key = key + (key << 6);
167 key = key ^ (key >> 22);
168 return (PetscHash32_t)key;
169}
170
171/* Thomas Wang's version for 64bit integer -> 64bit hash */
172PETSC_STATIC_INLINE PetscHash64_t PetscHash_UInt64_64(PetscHash64_t key)
173{
174 key = ~key + (key << 21); /* key = (key << 21) - key - 1; */
175 key = key ^ (key >> 24);
176 key = key * 265; /* key = (key + (key << 3)) + (key << 8); */
177 key = key ^ (key >> 14);
178 key = key * 21; /* key = (key + (key << 2)) + (key << 4); */
179 key = key ^ (key >> 28);
180 key = key + (key << 31);
181 return key;
182}
183
184PETSC_STATIC_INLINE PetscHash_t PetscHash_UInt64(PetscHash64_t key)
185{
186 return sizeof(PetscHash_t) < sizeof(PetscHash64_t)
187 ? (PetscHash_t)PetscHash_UInt64_32(key)
188 : (PetscHash_t)PetscHash_UInt64_64(key);
189}
190
191PETSC_STATIC_INLINE PetscHash_t PetscHashInt(PetscInt key)
192{
193#if defined(PETSC_USE_64BIT_INDICES)
194 return PetscHash_UInt64((PetscHash64_t)key);
195#else
196 return PetscHash_UInt32((PetscHash32_t)key);
 26. Calling 'PetscHash_UInt32'
 29. Returning from 'PetscHash_UInt32'
197#endif
198}
199
200PETSC_STATIC_INLINE PetscHash_t PetscHashCombine(PetscHash_t seed, PetscHash_t hash)
201{
202 /* https://doi.org/10.1002/asi.10170 */
203 /* https://dl.acm.org/citation.cfm?id=759509 */
204 return seed ^ (hash + (seed << 6) + (seed >> 2));
205}
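PetscHashCombine folds one hash into a running seed with the boost-style mix above, so an order-sensitive hash of a tuple falls out by chaining. A sketch for a directed edge (the helper name is made up):

static inline PetscHash_t PetscHashEdge(PetscInt from, PetscInt to)
{
  PetscHash_t seed = PetscHashInt(from);
  return PetscHashCombine(seed, PetscHashInt(to)); /* (from,to) and (to,from) hash differently in general */
}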
206
207#define PetscHashEqual(a,b) ((a) == (b))
208
209
210#endif /* PETSC_HASHTABLE_H */