Bug Summary

File: dm/impls/plex/plexdistribute.c
Warning: line 196, column 10
Array access (via field 'flags') results in a null pointer dereference

Annotated Source Code


/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c

1#include <petsc/private/dmpleximpl.h> /*I "petscdmplex.h" I*/
2#include <petsc/private/dmlabelimpl.h> /*I "petscdmlabel.h" I*/
3
4/*@C
5 DMPlexSetAdjacencyUser - Define adjacency in the mesh using a user-provided callback
6
7 Input Parameters:
8+ dm - The DM object
9. user - The user callback, may be NULL (to clear the callback)
10- ctx - context for callback evaluation, may be NULL
11
12 Level: advanced
13
14 Notes:
15 The caller of DMPlexGetAdjacency may need to arrange that a large enough array is available for the adjacency.
16
17 Any setting here overrides other configuration of DMPlex adjacency determination.
18
19.seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexGetAdjacencyUser()
20@*/
21PetscErrorCode DMPlexSetAdjacencyUser(DM dm,PetscErrorCode (*user)(DM,PetscInt,PetscInt*,PetscInt[],void*),void *ctx)
22{
23 DM_Plex *mesh = (DM_Plex *)dm->data;
24
25  PetscFunctionBegin;
26  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
27 mesh->useradjacency = user;
28 mesh->useradjacencyctx = ctx;
29  PetscFunctionReturn(0);
30}
31
32/*@C
33 DMPlexGetAdjacencyUser - get the user-defined adjacency callback
34
35 Input Parameter:
36. dm - The DM object
37
38 Output Parameters:
39+ user - The user callback
40- ctx - context for callback evaluation
41
42 Level: advanced
43
44.seealso: DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexGetAdjacency(), DMPlexSetAdjacencyUser()
45@*/
46PetscErrorCode DMPlexGetAdjacencyUser(DM dm, PetscErrorCode (**user)(DM,PetscInt,PetscInt*,PetscInt[],void*), void **ctx)
47{
48 DM_Plex *mesh = (DM_Plex *)dm->data;
49
50  PetscFunctionBegin;
51  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
52 if (user) *user = mesh->useradjacency;
53 if (ctx) *ctx = mesh->useradjacencyctx;
54  PetscFunctionReturn(0);
55}
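
/* Illustrative usage sketch, not part of plexdistribute.c: a minimal user adjacency callback
   that reports only the cone of a point, registered and queried through the two routines
   above. The helper names ConeOnlyAdjacency and RegisterConeOnlyAdjacency are hypothetical;
   the callback contract (capacity passed in through *adjSize, count returned in it) follows
   how DMPlexGetAdjacency_Internal invokes mesh->useradjacency later in this file. */
#include <petscdmplex.h>

static PetscErrorCode ConeOnlyAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[], void *ctx)
{
  const PetscInt *cone;
  PetscInt        coneSize, c;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
  ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
  if (coneSize > *adjSize) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Adjacency array too small");
  for (c = 0; c < coneSize; ++c) adj[c] = cone[c];
  *adjSize = coneSize;
  PetscFunctionReturn(0);
}

static PetscErrorCode RegisterConeOnlyAdjacency(DM dm)
{
  PetscErrorCode (*cb)(DM,PetscInt,PetscInt*,PetscInt[],void*);
  void            *cbctx;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  ierr = DMPlexSetAdjacencyUser(dm, ConeOnlyAdjacency, NULL);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUser(dm, &cb, &cbctx);CHKERRQ(ierr); /* cb now points at ConeOnlyAdjacency */
  PetscFunctionReturn(0);
}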
56
57/*@
58 DMPlexSetAdjacencyUseAnchors - Define adjacency in the mesh using the point-to-point constraints.
59
60 Input Parameters:
61+ dm - The DM object
62- useAnchors - Flag to use the constraints. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
63
64 Level: intermediate
65
66.seealso: DMGetAdjacency(), DMSetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
67@*/
68PetscErrorCode DMPlexSetAdjacencyUseAnchors(DM dm, PetscBool useAnchors)
69{
70 DM_Plex *mesh = (DM_Plex *) dm->data;
71
72  PetscFunctionBegin;
73  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
74 mesh->useAnchors = useAnchors;
75  PetscFunctionReturn(0);
76}
77
78/*@
79 DMPlexGetAdjacencyUseAnchors - Query whether adjacency in the mesh uses the point-to-point constraints.
80
81 Input Parameter:
82. dm - The DM object
83
84 Output Parameter:
85. useAnchors - Flag to use the constraints. If PETSC_TRUE, then constrained points are omitted from DMPlexGetAdjacency(), and their anchor points appear in their place.
86
87 Level: intermediate
88
89.seealso: DMPlexSetAdjacencyUseAnchors(), DMSetAdjacency(), DMGetAdjacency(), DMPlexDistribute(), DMPlexPreallocateOperator(), DMPlexSetAnchors()
90@*/
91PetscErrorCode DMPlexGetAdjacencyUseAnchors(DM dm, PetscBool *useAnchors)
92{
93 DM_Plex *mesh = (DM_Plex *) dm->data;
94
95  PetscFunctionBegin;
96  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
97  PetscValidIntPointer(useAnchors, 2);
98 *useAnchors = mesh->useAnchors;
99  PetscFunctionReturn(0);
100}
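
/* Illustrative usage sketch, not part of plexdistribute.c: enabling anchor-based adjacency and
   reading the flag back with the two routines above. The helper name UseAnchorAdjacency is
   hypothetical. */
#include <petscdmplex.h>

static PetscErrorCode UseAnchorAdjacency(DM dm)
{
  PetscBool      useAnchors;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexSetAdjacencyUseAnchors(dm, PETSC_TRUE);CHKERRQ(ierr);
  ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
  if (!useAnchors) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Anchor flag was not stored");
  PetscFunctionReturn(0);
}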
101
102static PetscErrorCode DMPlexGetAdjacency_Cone_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
103{
104  const PetscInt *cone = NULL;
105  PetscInt        numAdj = 0, maxAdjSize = *adjSize, coneSize, c;
106  PetscErrorCode  ierr;
107
108  PetscFunctionBeginHot;
109  ierr = DMPlexGetConeSize(dm, p, &coneSize);CHKERRQ(ierr);
110  ierr = DMPlexGetCone(dm, p, &cone);CHKERRQ(ierr);
111  for (c = 0; c <= coneSize; ++c) {
112    const PetscInt  point = !c ? p : cone[c-1];
113    const PetscInt *support = NULL;
114    PetscInt        supportSize, s, q;
115
116    ierr = DMPlexGetSupportSize(dm, point, &supportSize);CHKERRQ(ierr);
117    ierr = DMPlexGetSupport(dm, point, &support);CHKERRQ(ierr);
118    for (s = 0; s < supportSize; ++s) {
119      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = support[s]),0); ++q) {
120        if (support[s] == adj[q]) break;
121      }
122      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
123    }
124  }
125  *adjSize = numAdj;
126  PetscFunctionReturn(0);
127}
128
129static PetscErrorCode DMPlexGetAdjacency_Support_Internal(DM dm, PetscInt p, PetscInt *adjSize, PetscInt adj[])
130{
131  const PetscInt *support = NULL;
132  PetscInt        numAdj = 0, maxAdjSize = *adjSize, supportSize, s;
133  PetscErrorCode  ierr;
134
135  PetscFunctionBeginHot;
136  ierr = DMPlexGetSupportSize(dm, p, &supportSize);CHKERRQ(ierr);
137  ierr = DMPlexGetSupport(dm, p, &support);CHKERRQ(ierr);
138  for (s = 0; s <= supportSize; ++s) {
139    const PetscInt  point = !s ? p : support[s-1];
140    const PetscInt *cone = NULL;
141    PetscInt        coneSize, c, q;
142
143    ierr = DMPlexGetConeSize(dm, point, &coneSize);CHKERRQ(ierr);
144    ierr = DMPlexGetCone(dm, point, &cone);CHKERRQ(ierr);
145    for (c = 0; c < coneSize; ++c) {
146      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = cone[c]),0); ++q) {
147        if (cone[c] == adj[q]) break;
148      }
149      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
150    }
151  }
152  *adjSize = numAdj;
153  PetscFunctionReturn(0);
154}
155
156static PetscErrorCode DMPlexGetAdjacency_Transitive_Internal(DM dm, PetscInt p, PetscBool useClosure, PetscInt *adjSize, PetscInt adj[])
157{
158  PetscInt       *star = NULL;
159  PetscInt        numAdj = 0, maxAdjSize = *adjSize, starSize, s;
160  PetscErrorCode  ierr;
161
162  PetscFunctionBeginHot;
163  ierr = DMPlexGetTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
164  for (s = 0; s < starSize*2; s += 2) {
165    const PetscInt *closure = NULL;
166    PetscInt        closureSize, c, q;
167
168    ierr = DMPlexGetTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
169    for (c = 0; c < closureSize*2; c += 2) {
170      for (q = 0; q < numAdj || ((void)(adj[numAdj++] = closure[c]),0); ++q) {
171        if (closure[c] == adj[q]) break;
172      }
173      if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
174    }
175    ierr = DMPlexRestoreTransitiveClosure(dm, star[s], (PetscBool)!useClosure, &closureSize, (PetscInt**) &closure);CHKERRQ(ierr);
176  }
177  ierr = DMPlexRestoreTransitiveClosure(dm, p, useClosure, &starSize, &star);CHKERRQ(ierr);
178  *adjSize = numAdj;
179  PetscFunctionReturn(0);
180}
181
182PetscErrorCode DMPlexGetAdjacency_Internal(DM dm, PetscInt p, PetscBool useCone, PetscBool useTransitiveClosure, PetscBool useAnchors, PetscInt *adjSize, PetscInt *adj[])
183{
184 static PetscInt asiz = 0;
185 PetscInt maxAnchors = 1;
186 PetscInt aStart = -1, aEnd = -1;
187 PetscInt maxAdjSize;
188  PetscSection    aSec = NULL;
189  IS              aIS  = NULL;
190  const PetscInt *anchors;
191  DM_Plex        *mesh = (DM_Plex *)dm->data;
192  PetscErrorCode  ierr;
193
194  PetscFunctionBeginHot;
195  if (useAnchors) {
196    ierr = DMPlexGetAnchors(dm,&aSec,&aIS);CHKERRQ(ierr);
197    if (aSec) {
198      ierr = PetscSectionGetMaxDof(aSec,&maxAnchors);CHKERRQ(ierr);
199      maxAnchors = PetscMax(1,maxAnchors);
200      ierr = PetscSectionGetChart(aSec,&aStart,&aEnd);CHKERRQ(ierr);
201      ierr = ISGetIndices(aIS,&anchors);CHKERRQ(ierr);
202    }
203  }
204  if (!*adj) {
205    PetscInt depth, coneSeries, supportSeries, maxC, maxS, pStart, pEnd;
206
207    ierr = DMPlexGetChart(dm, &pStart,&pEnd);CHKERRQ(ierr);
208    ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
209    ierr = DMPlexGetMaxSizes(dm, &maxC, &maxS);CHKERRQ(ierr);
210    coneSeries    = (maxC > 1) ? ((PetscPowInt(maxC,depth+1)-1)/(maxC-1)) : depth+1;
211    supportSeries = (maxS > 1) ? ((PetscPowInt(maxS,depth+1)-1)/(maxS-1)) : depth+1;
212    asiz  = PetscMax(PetscPowInt(maxS,depth)*coneSeries,PetscPowInt(maxC,depth)*supportSeries);
213    asiz *= maxAnchors;
214    asiz  = PetscMin(asiz,pEnd-pStart);
215    ierr = PetscMalloc1(asiz,adj);CHKERRQ(ierr);
216  }
217  if (*adjSize < 0) *adjSize = asiz;
218  maxAdjSize = *adjSize;
219  if (mesh->useradjacency) {
220    ierr = mesh->useradjacency(dm, p, adjSize, *adj, mesh->useradjacencyctx);CHKERRQ(ierr);
221  } else if (useTransitiveClosure) {
222    ierr = DMPlexGetAdjacency_Transitive_Internal(dm, p, useCone, adjSize, *adj);CHKERRQ(ierr);
223  } else if (useCone) {
224    ierr = DMPlexGetAdjacency_Cone_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
225  } else {
226    ierr = DMPlexGetAdjacency_Support_Internal(dm, p, adjSize, *adj);CHKERRQ(ierr);
227  }
228  if (useAnchors && aSec) {
229    PetscInt  origSize = *adjSize;
230    PetscInt  numAdj   = origSize;
231    PetscInt  i = 0, j;
232    PetscInt *orig = *adj;
233
234    while (i < origSize) {
235      PetscInt p = orig[i];
236      PetscInt aDof = 0;
237
238      if (p >= aStart && p < aEnd) {
239        ierr = PetscSectionGetDof(aSec,p,&aDof);CHKERRQ(ierr);
240      }
241      if (aDof) {
242        PetscInt aOff;
243        PetscInt s, q;
244
245        for (j = i + 1; j < numAdj; j++) {
246          orig[j - 1] = orig[j];
247        }
248        origSize--;
249        numAdj--;
250        ierr = PetscSectionGetOffset(aSec,p,&aOff);CHKERRQ(ierr);
251        for (s = 0; s < aDof; ++s) {
252          for (q = 0; q < numAdj || ((void)(orig[numAdj++] = anchors[aOff+s]),0); ++q) {
253            if (anchors[aOff+s] == orig[q]) break;
254          }
255          if (numAdj > maxAdjSize) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid mesh exceeded adjacency allocation (%D)", maxAdjSize);
256        }
257      }
258      else {
259        i++;
260      }
261    }
262    *adjSize = numAdj;
263    ierr = ISRestoreIndices(aIS,&anchors);CHKERRQ(ierr);
264  }
265  PetscFunctionReturn(0);
266}
267
268/*@
269 DMPlexGetAdjacency - Return all points adjacent to the given point
270
271 Input Parameters:
272+ dm - The DM object
273. p - The point
274. adjSize - The maximum size of adj if it is non-NULL, or PETSC_DETERMINE
275- adj - Either NULL so that the array is allocated, or an existing array with size adjSize
276
277 Output Parameters:
278+ adjSize - The number of adjacent points
279- adj - The adjacent points
280
281 Level: advanced
282
283 Notes:
284 The user must PetscFree the adj array if it was not passed in.
285
286.seealso: DMSetAdjacency(), DMPlexDistribute(), DMCreateMatrix(), DMPlexPreallocateOperator()
287@*/
288PetscErrorCode DMPlexGetAdjacency(DM dm, PetscInt p, PetscInt *adjSize, PetscInt *adj[])
289{
290 PetscBool useCone, useClosure, useAnchors;
291 PetscErrorCode ierr;
292
293  PetscFunctionBeginHot;
294  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
295  PetscValidPointer(adjSize,3);
296  PetscValidPointer(adj,4);
297  ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr);
298  ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr);
299  ierr = DMPlexGetAdjacency_Internal(dm, p, useCone, useClosure, useAnchors, adjSize, adj);CHKERRQ(ierr);
300  PetscFunctionReturn(0);
301}
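
/* Illustrative usage sketch, not part of plexdistribute.c: letting DMPlexGetAdjacency() allocate
   the adjacency array (adj = NULL, adjSize = PETSC_DETERMINE) and freeing it afterwards, as the
   Notes above require. The helper name PrintAdjacencyOfPoint is hypothetical. */
#include <petscdmplex.h>

static PetscErrorCode PrintAdjacencyOfPoint(DM dm, PetscInt p)
{
  PetscInt      *adj = NULL, adjSize = PETSC_DETERMINE, a;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr);
  for (a = 0; a < adjSize; ++a) {
    ierr = PetscPrintf(PETSC_COMM_SELF, "point %D adjacent to %D\n", p, adj[a]);CHKERRQ(ierr);
  }
  ierr = PetscFree(adj);CHKERRQ(ierr); /* required because the array was allocated by the call */
  PetscFunctionReturn(0);
}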
302
303/*@
304 DMPlexCreateTwoSidedProcessSF - Create an SF which just has process connectivity
305
306 Collective on DM
307
308 Input Parameters:
309+ dm - The DM
310- sfPoint - The PetscSF which encodes point connectivity
311
312 Output Parameters:
313+ processRanks - A list of process neighbors, or NULL
314- sfProcess - An SF encoding the two-sided process connectivity, or NULL
315
316 Level: developer
317
318.seealso: PetscSFCreate(), DMPlexCreateProcessSF()
319@*/
320PetscErrorCode DMPlexCreateTwoSidedProcessSF(DM dm, PetscSF sfPoint, PetscSection rootRankSection, IS rootRanks, PetscSection leafRankSection, IS leafRanks, IS *processRanks, PetscSF *sfProcess)
321{
322 const PetscSFNode *remotePoints;
323 PetscInt *localPointsNew;
324 PetscSFNode *remotePointsNew;
325 const PetscInt *nranks;
326 PetscInt *ranksNew;
327 PetscBT neighbors;
328 PetscInt pStart, pEnd, p, numLeaves, l, numNeighbors, n;
329 PetscMPIInt size, proc, rank;
330 PetscErrorCode ierr;
331
332  PetscFunctionBegin;
333  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
334  PetscValidHeaderSpecific(sfPoint, PETSCSF_CLASSID, 2);
335  if (processRanks) {PetscValidPointer(processRanks, 3);}
336  if (sfProcess)    {PetscValidPointer(sfProcess, 4);}
337  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr);
338  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
339  ierr = PetscSFGetGraph(sfPoint, NULL, &numLeaves, NULL, &remotePoints);CHKERRQ(ierr);
340  ierr = PetscBTCreate(size, &neighbors);CHKERRQ(ierr);
341  ierr = PetscBTMemzero(size, neighbors);CHKERRQ(ierr);
342  /* Compute root-to-leaf process connectivity */
343  ierr = PetscSectionGetChart(rootRankSection, &pStart, &pEnd);CHKERRQ(ierr);
344  ierr = ISGetIndices(rootRanks, &nranks);CHKERRQ(ierr);
345  for (p = pStart; p < pEnd; ++p) {
346    PetscInt ndof, noff, n;
347
348    ierr = PetscSectionGetDof(rootRankSection, p, &ndof);CHKERRQ(ierr);
349    ierr = PetscSectionGetOffset(rootRankSection, p, &noff);CHKERRQ(ierr);
350    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
351  }
352  ierr = ISRestoreIndices(rootRanks, &nranks);CHKERRQ(ierr);
353  /* Compute leaf-to-neighbor process connectivity */
354  ierr = PetscSectionGetChart(leafRankSection, &pStart, &pEnd);CHKERRQ(ierr);
355  ierr = ISGetIndices(leafRanks, &nranks);CHKERRQ(ierr);
356  for (p = pStart; p < pEnd; ++p) {
357    PetscInt ndof, noff, n;
358
359    ierr = PetscSectionGetDof(leafRankSection, p, &ndof);CHKERRQ(ierr);
360    ierr = PetscSectionGetOffset(leafRankSection, p, &noff);CHKERRQ(ierr);
361    for (n = 0; n < ndof; ++n) {ierr = PetscBTSet(neighbors, nranks[noff+n]);CHKERRQ(ierr);}
362  }
363  ierr = ISRestoreIndices(leafRanks, &nranks);CHKERRQ(ierr);
364  /* Compute leaf-to-root process connectivity */
365  for (l = 0; l < numLeaves; ++l) {PetscBTSet(neighbors, remotePoints[l].rank);}
366  /* Calculate edges */
367  PetscBTClear(neighbors, rank);
368  for (proc = 0, numNeighbors = 0; proc < size; ++proc) {if (PetscBTLookup(neighbors, proc)) ++numNeighbors;}
369  ierr = PetscMalloc1(numNeighbors, &ranksNew);CHKERRQ(ierr);
370  ierr = PetscMalloc1(numNeighbors, &localPointsNew);CHKERRQ(ierr);
371  ierr = PetscMalloc1(numNeighbors, &remotePointsNew);CHKERRQ(ierr);
372  for (proc = 0, n = 0; proc < size; ++proc) {
373    if (PetscBTLookup(neighbors, proc)) {
374      ranksNew[n]              = proc;
375      localPointsNew[n]        = proc;
376      remotePointsNew[n].index = rank;
377      remotePointsNew[n].rank  = proc;
378      ++n;
379    }
380  }
381  ierr = PetscBTDestroy(&neighbors);CHKERRQ(ierr);
382  if (processRanks) {ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), numNeighbors, ranksNew, PETSC_OWN_POINTER, processRanks);CHKERRQ(ierr);}
383  else              {ierr = PetscFree(ranksNew);CHKERRQ(ierr);}
384  if (sfProcess) {
385    ierr = PetscSFCreate(PetscObjectComm((PetscObject)dm), sfProcess);CHKERRQ(ierr);
386    ierr = PetscObjectSetName((PetscObject) *sfProcess, "Two-Sided Process SF");CHKERRQ(ierr);
387    ierr = PetscSFSetFromOptions(*sfProcess);CHKERRQ(ierr);
388    ierr = PetscSFSetGraph(*sfProcess, size, numNeighbors, localPointsNew, PETSC_OWN_POINTER, remotePointsNew, PETSC_OWN_POINTER);CHKERRQ(ierr);
389  }
390  PetscFunctionReturn(0);
391}
392
393/*@
394 DMPlexDistributeOwnership - Compute owner information for shared points. This basically gets two-sided for an SF.
395
396 Collective on DM
397
398 Input Parameter:
399. dm - The DM
400
401 Output Parameters:
402+ rootSection - The number of leaves for a given root point
403. rootrank - The rank of each edge into the root point
404. leafSection - The number of processes sharing a given leaf point
405- leafrank - The rank of each process sharing a leaf point
406
407 Level: developer
408
409.seealso: DMPlexCreateOverlap()
410@*/
411PetscErrorCode DMPlexDistributeOwnership(DM dm, PetscSection rootSection, IS *rootrank, PetscSection leafSection, IS *leafrank)
412{
413 MPI_Comm comm;
414 PetscSF sfPoint;
415 const PetscInt *rootdegree;
416 PetscInt *myrank, *remoterank;
417 PetscInt pStart, pEnd, p, nedges;
418 PetscMPIInt rank;
419 PetscErrorCode ierr;
420
421  PetscFunctionBegin;
422  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
423  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
424  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
425  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
426  /* Compute number of leaves for each root */
427  ierr = PetscObjectSetName((PetscObject) rootSection, "Root Section");CHKERRQ(ierr);
428  ierr = PetscSectionSetChart(rootSection, pStart, pEnd);CHKERRQ(ierr);
429  ierr = PetscSFComputeDegreeBegin(sfPoint, &rootdegree);CHKERRQ(ierr);
430  ierr = PetscSFComputeDegreeEnd(sfPoint, &rootdegree);CHKERRQ(ierr);
431  for (p = pStart; p < pEnd; ++p) {ierr = PetscSectionSetDof(rootSection, p, rootdegree[p-pStart]);CHKERRQ(ierr);}
432  ierr = PetscSectionSetUp(rootSection);CHKERRQ(ierr);
433  /* Gather rank of each leaf to root */
434  ierr = PetscSectionGetStorageSize(rootSection, &nedges);CHKERRQ(ierr);
435  ierr = PetscMalloc1(pEnd-pStart, &myrank);CHKERRQ(ierr);
436  ierr = PetscMalloc1(nedges, &remoterank);CHKERRQ(ierr);
437  for (p = 0; p < pEnd-pStart; ++p) myrank[p] = rank;
438  ierr = PetscSFGatherBegin(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
439  ierr = PetscSFGatherEnd(sfPoint, MPIU_INT, myrank, remoterank);CHKERRQ(ierr);
440  ierr = PetscFree(myrank);CHKERRQ(ierr);
441  ierr = ISCreateGeneral(comm, nedges, remoterank, PETSC_OWN_POINTER, rootrank);CHKERRQ(ierr);
442  /* Distribute remote ranks to leaves */
443  ierr = PetscObjectSetName((PetscObject) leafSection, "Leaf Section");CHKERRQ(ierr);
444  ierr = DMPlexDistributeFieldIS(dm, sfPoint, rootSection, *rootrank, leafSection, leafrank);CHKERRQ(ierr);
445  PetscFunctionReturn(0);
446}
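
/* Illustrative usage sketch, not part of plexdistribute.c: computing ownership of shared points
   and then building the two-sided process SF from it, chaining DMPlexDistributeOwnership() into
   DMPlexCreateTwoSidedProcessSF() above. The helper name BuildProcessSF is hypothetical. */
#include <petscdmplex.h>

static PetscErrorCode BuildProcessSF(DM dm, PetscSF *sfProcess)
{
  PetscSection   rootSection, leafSection;
  IS             rootrank, leafrank;
  PetscSF        sfPoint;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &rootSection);CHKERRQ(ierr);
  ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &leafSection);CHKERRQ(ierr);
  ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr);
  ierr = DMPlexCreateTwoSidedProcessSF(dm, sfPoint, rootSection, rootrank, leafSection, leafrank, NULL, sfProcess);CHKERRQ(ierr);
  ierr = ISDestroy(&rootrank);CHKERRQ(ierr);
  ierr = ISDestroy(&leafrank);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}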
447
448/*@C
449 DMPlexCreateOverlap - Compute the overlap of the local mesh: a DMLabel marking the points to be sent to neighboring processes for the requested number of overlap levels, using the ownership information for shared points
450
451 Collective on DM
452
453 Input Parameters:
454+ dm - The DM
455. levels - Number of overlap levels
456. rootSection - The number of leaves for a given root point
457. rootrank - The rank of each edge into the root point
458. leafSection - The number of processes sharing a given leaf point
459- leafrank - The rank of each process sharing a leaf point
460
461 Output Parameter:
462. ovLabel - DMLabel containing remote overlap contributions as point/rank pairings
463
464 Level: developer
465
466.seealso: DMPlexDistributeOwnership(), DMPlexDistribute()
467@*/
468PetscErrorCode DMPlexCreateOverlap(DM dm, PetscInt levels, PetscSection rootSection, IS rootrank, PetscSection leafSection, IS leafrank, DMLabel *ovLabel)
469{
470 MPI_Comm comm;
471 DMLabel ovAdjByRank; /* A DMLabel containing all points adjacent to shared points, separated by rank (value in label) */
472 PetscSF sfPoint;
473 const PetscSFNode *remote;
474 const PetscInt *local;
475 const PetscInt *nrank, *rrank;
476  PetscInt          *adj = NULL;
477  PetscInt           pStart, pEnd, p, sStart, sEnd, nleaves, l;
478  PetscMPIInt         rank, size;
479  PetscBool           flg;
480  PetscErrorCode      ierr;
481
482  PetscFunctionBegin;
483  ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr);
484  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
485  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
486  ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr);
487  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
488  ierr = PetscSectionGetChart(leafSection, &sStart, &sEnd);CHKERRQ(ierr);
489  ierr = PetscSFGetGraph(sfPoint, NULL, &nleaves, &local, &remote);CHKERRQ(ierr);
490  ierr = DMLabelCreate(PETSC_COMM_SELF, "Overlap adjacency", &ovAdjByRank);CHKERRQ(ierr);
491  /* Handle leaves: shared with the root point */
492  for (l = 0; l < nleaves; ++l) {
493    PetscInt adjSize = PETSC_DETERMINE, a;
494
495    ierr = DMPlexGetAdjacency(dm, local ? local[l] : l, &adjSize, &adj);CHKERRQ(ierr);
496    for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remote[l].rank);CHKERRQ(ierr);}
497  }
498  ierr = ISGetIndices(rootrank, &rrank);CHKERRQ(ierr);
499  ierr = ISGetIndices(leafrank, &nrank);CHKERRQ(ierr);
500  /* Handle roots */
501  for (p = pStart; p < pEnd; ++p) {
502    PetscInt adjSize = PETSC_DETERMINE, neighbors = 0, noff, n, a;
503
504    if ((p >= sStart) && (p < sEnd)) {
505      /* Some leaves share a root with other leaves on different processes */
506      ierr = PetscSectionGetDof(leafSection, p, &neighbors);CHKERRQ(ierr);
)0x44000001),506,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
507 if (neighbors) {
508 ierr = PetscSectionGetOffset(leafSection, p, &noff);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),508,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
509 ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),509,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
510 for (n = 0; n < neighbors; ++n) {
511 const PetscInt remoteRank = nrank[noff+n];
512
513 if (remoteRank == rank) continue;
514 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),514,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
515 }
516 }
517 }
518 /* Roots are shared with leaves */
519 ierr = PetscSectionGetDof(rootSection, p, &neighbors);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),519,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
520 if (!neighbors) continue;
521 ierr = PetscSectionGetOffset(rootSection, p, &noff);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),521,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
522 ierr = DMPlexGetAdjacency(dm, p, &adjSize, &adj);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),522,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
523 for (n = 0; n < neighbors; ++n) {
524 const PetscInt remoteRank = rrank[noff+n];
525
526 if (remoteRank == rank) continue;
527 for (a = 0; a < adjSize; ++a) {ierr = DMLabelSetValue(ovAdjByRank, adj[a], remoteRank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),527,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
528 }
529 }
530 ierr = PetscFree(adj)((*PetscTrFree)((void*)(adj),530,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
) || ((adj) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),530,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
531 ierr = ISRestoreIndices(rootrank, &rrank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),531,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
532 ierr = ISRestoreIndices(leafrank, &nrank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),532,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
533 /* Add additional overlap levels */
534 for (l = 1; l < levels; l++) {
535 /* Propagate point donations over SF to capture remote connections */
536 ierr = DMPlexPartitionLabelPropagate(dm, ovAdjByRank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),536,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
537 /* Add next level of point donations to the label */
538 ierr = DMPlexPartitionLabelAdjacency(dm, ovAdjByRank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),538,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
539 }
540 /* We require the closure in the overlap */
541 ierr = DMPlexPartitionLabelClosure(dm, ovAdjByRank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),541,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
542 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-overlap_view", &flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),542,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
543 if (flg) {
544 PetscViewer viewer;
545 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm), &viewer);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),545,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
546 ierr = DMLabelView(ovAdjByRank, viewer);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),546,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
547 }
548 /* Invert sender to receiver label */
549 ierr = DMLabelCreate(PETSC_COMM_SELF((MPI_Comm)0x44000001), "Overlap label", ovLabel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),549,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
550 ierr = DMPlexPartitionLabelInvert(dm, ovAdjByRank, NULL((void*)0), *ovLabel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),550,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
551 /* Add owned points, except for shared local points */
552 for (p = pStart; p < pEnd; ++p) {ierr = DMLabelSetValue(*ovLabel, p, rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),552,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
553 for (l = 0; l < nleaves; ++l) {
554 ierr = DMLabelClearValue(*ovLabel, local[l], rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),554,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
555 ierr = DMLabelSetValue(*ovLabel, remote[l].index, remote[l].rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),555,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
556 }
557 /* Clean up */
558 ierr = DMLabelDestroy(&ovAdjByRank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),558,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
559 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
560}
561
562/*@C
563 DMPlexCreateOverlapMigrationSF - Create an SF describing the new mesh distribution to make the overlap described by the input SF
564
565 Collective on DM
566
567 Input Parameters:
568+ dm - The DM
569- overlapSF - The SF mapping ghost points in overlap to owner points on other processes
570
571 Output Parameter:
572. migrationSF - An SF that maps original points in old locations to points in new locations
573
574 Level: developer
575
576.seealso: DMPlexCreateOverlap(), DMPlexDistribute()
577@*/
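/* Example (editorial sketch, not part of the original source): the intended call
   sequence, assuming ovLabel comes from DMPlexCreateOverlap() and is converted to the
   two-sided overlap SF with DMPlexPartitionLabelCreateSF(), as DMPlexDistributeOverlap()
   does; error checking is elided.

     PetscSF overlapSF, migrationSF;

     DMPlexPartitionLabelCreateSF(dm, ovLabel, &overlapSF);
     DMPlexCreateOverlapMigrationSF(dm, overlapSF, &migrationSF);
     PetscSFDestroy(&overlapSF);
     ... migrate the mesh with migrationSF, e.g. through DMPlexMigrate() ...
     PetscSFDestroy(&migrationSF);
*/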
578PetscErrorCode DMPlexCreateOverlapMigrationSF(DM dm, PetscSF overlapSF, PetscSF *migrationSF)
579{
580 MPI_Comm comm;
581 PetscMPIInt rank, size;
582 PetscInt d, dim, p, pStart, pEnd, nroots, nleaves, newLeaves, point, numSharedPoints;
583 PetscInt *pointDepths, *remoteDepths, *ilocal;
584 PetscInt *depthRecv, *depthShift, *depthIdx;
585 PetscSFNode *iremote;
586 PetscSF pointSF;
587 const PetscInt *sharedLocal;
588 const PetscSFNode *overlapRemote, *sharedRemote;
589 PetscErrorCode ierr;
590
591 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 591; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
592 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),592,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),592,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),592,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),592,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
593 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),593,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
594 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),594,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
595 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),595,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
596 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),596,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
597
598 /* Before building the migration SF we need to know the new stratum offsets */
599 ierr = PetscSFGetGraph(overlapSF, &nroots, &nleaves, NULL((void*)0), &overlapRemote);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),599,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
600 ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths)PetscMallocA(2,PETSC_FALSE,600,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(nroots)*sizeof(**(&pointDepths)),(&pointDepths
),(size_t)(nleaves)*sizeof(**(&remoteDepths)),(&remoteDepths
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),600,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
601 for (d=0; d<dim+1; d++) {
602 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),602,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
603 for (p=pStart; p<pEnd; p++) pointDepths[p] = d;
604 }
605 for (p=0; p<nleaves; p++) remoteDepths[p] = -1;
606 ierr = PetscSFBcastBegin(overlapSF, MPIU_INT((MPI_Datatype)0x4c000405), pointDepths, remoteDepths);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),606,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
607 ierr = PetscSFBcastEnd(overlapSF, MPIU_INT((MPI_Datatype)0x4c000405), pointDepths, remoteDepths);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),607,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
608
609 /* Count received points in each stratum and compute the internal strata shift */
610 ierr = PetscMalloc3(dim+1, &depthRecv, dim+1, &depthShift, dim+1, &depthIdx)PetscMallocA(3,PETSC_FALSE,610,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(dim+1)*sizeof(**(&depthRecv)),(&depthRecv),
(size_t)(dim+1)*sizeof(**(&depthShift)),(&depthShift)
,(size_t)(dim+1)*sizeof(**(&depthIdx)),(&depthIdx))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),610,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
611 for (d=0; d<dim+1; d++) depthRecv[d]=0;
612 for (p=0; p<nleaves; p++) depthRecv[remoteDepths[p]]++;
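  /* Editorial note (added): the shifts below append received points at the end of each
     stratum, keeping the usual chart ordering of cells (depth dim), vertices (depth 0),
     then the remaining depths in decreasing order; depthShift[d] is the total number of
     received points in strata that precede stratum d in that ordering. */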
613 depthShift[dim] = 0;
614 for (d=0; d<dim; d++) depthShift[d] = depthRecv[dim];
615 for (d=1; d<dim; d++) depthShift[d] += depthRecv[0];
616 for (d=dim-2; d>0; d--) depthShift[d] += depthRecv[d+1];
617 for (d=0; d<dim+1; d++) {
618 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),618,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
619 depthIdx[d] = pStart + depthShift[d];
620 }
621
622 /* Build an SF that describes the full overlap migration */
623 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),623,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
624 newLeaves = pEnd - pStart + nleaves;
625 ierr = PetscMalloc1(newLeaves, &ilocal)PetscMallocA(1,PETSC_FALSE,625,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(newLeaves)*sizeof(**(&ilocal)),(&ilocal))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),625,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
626 ierr = PetscMalloc1(newLeaves, &iremote)PetscMallocA(1,PETSC_FALSE,626,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(newLeaves)*sizeof(**(&iremote)),(&iremote))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),626,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
627 /* First map local points to themselves */
628 for (d=0; d<dim+1; d++) {
629 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),629,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
630 for (p=pStart; p<pEnd; p++) {
631 point = p + depthShift[d];
632 ilocal[point] = point;
633 iremote[point].index = p;
634 iremote[point].rank = rank;
635 depthIdx[d]++;
636 }
637 }
638
639 /* Add in the remote roots for currently shared points */
640 ierr = DMGetPointSF(dm, &pointSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),640,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
641 ierr = PetscSFGetGraph(pointSF, NULL((void*)0), &numSharedPoints, &sharedLocal, &sharedRemote);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),641,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
642 for (d=0; d<dim+1; d++) {
643 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),643,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
644 for (p=0; p<numSharedPoints; p++) {
645 if (pStart <= sharedLocal[p] && sharedLocal[p] < pEnd) {
646 point = sharedLocal[p] + depthShift[d];
647 iremote[point].index = sharedRemote[p].index;
648 iremote[point].rank = sharedRemote[p].rank;
649 }
650 }
651 }
652
653 /* Now add the incoming overlap points */
654 for (p=0; p<nleaves; p++) {
655 point = depthIdx[remoteDepths[p]];
656 ilocal[point] = point;
657 iremote[point].index = overlapRemote[p].index;
658 iremote[point].rank = overlapRemote[p].rank;
659 depthIdx[remoteDepths[p]]++;
660 }
661 ierr = PetscFree2(pointDepths,remoteDepths)PetscFreeA(2,661,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,&(pointDepths),&(remoteDepths))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),661,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
662
663 ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),663,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
664 ierr = PetscObjectSetName((PetscObject) *migrationSF, "Overlap Migration SF");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),664,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
665 ierr = PetscSFSetFromOptions(*migrationSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),665,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
666 ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),666,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
667 ierr = PetscSFSetGraph(*migrationSF, pEnd-pStart, newLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),667,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
668
669 ierr = PetscFree3(depthRecv, depthShift, depthIdx)PetscFreeA(3,669,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,&(depthRecv),&(depthShift),&(depthIdx))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),669,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
670 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
671}
672
673/*@
674 DMPlexStratifyMigrationSF - Rearrange the leaves of a migration sf for stratification.
675
676 Input Parameters:
677+ dm - The DM
678- sf - A star forest with unordered leaves, usually defining a DM point migration
679
680 Output Parameter:
681. migrationSF - A star forest with added leaf indirection that ensures the resulting DM is stratified
682
683 Level: developer
684
685.seealso: DMPlexPartitionLabelCreateSF(), DMPlexDistribute(), DMPlexDistributeOverlap()
686@*/
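/* Example (editorial sketch, not part of the original source): typical use, replacing an
   unstratified migration SF with its stratified equivalent; error checking is elided.

     PetscSF sfMigration, sfStratified;

     ... obtain sfMigration, e.g. from DMPlexCreateOverlapMigrationSF() ...
     DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);
     PetscSFDestroy(&sfMigration);
     sfMigration = sfStratified;
*/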
687PetscErrorCode DMPlexStratifyMigrationSF(DM dm, PetscSF sf, PetscSF *migrationSF)
688{
689 MPI_Comm comm;
690 PetscMPIInt rank, size;
691 PetscInt d, ldepth, depth, p, pStart, pEnd, nroots, nleaves;
692 PetscInt *pointDepths, *remoteDepths, *ilocal;
693 PetscInt *depthRecv, *depthShift, *depthIdx;
694 PetscInt hybEnd[4];
695 const PetscSFNode *iremote;
696 PetscErrorCode ierr;
697
698 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 698; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
699 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),699,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),699,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),699,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),699,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
700 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),700,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
701 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),701,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
702 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),702,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
703 ierr = DMPlexGetDepth(dm, &ldepth);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),703,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
704 ierr = MPIU_Allreduce(&ldepth, &depth, 1, MPIU_INT, MPI_MAX, comm)(PetscAllreduceBarrierCheck(comm,1,704,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((comm)),0) ||
MPI_Allreduce((&ldepth),(&depth),(1),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000001)),(comm))))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),704,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
705 if ((ldepth >= 0) && (depth != ldepth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", ldepth, depth)return PetscError(((MPI_Comm)0x44000001),705,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Inconsistent Plex depth %d != %d",ldepth
,depth)
;
706
707 /* Before building the migration SF we need to know the new stratum offsets */
708 ierr = PetscSFGetGraph(sf, &nroots, &nleaves, NULL((void*)0), &iremote);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),708,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
709 ierr = PetscMalloc2(nroots, &pointDepths, nleaves, &remoteDepths)PetscMallocA(2,PETSC_FALSE,709,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(nroots)*sizeof(**(&pointDepths)),(&pointDepths
),(size_t)(nleaves)*sizeof(**(&remoteDepths)),(&remoteDepths
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),709,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
710 ierr = DMPlexGetHybridBounds(dm,&hybEnd[depth],&hybEnd[PetscMax(depth-1,0)(((depth-1)<(0)) ? (0) : (depth-1))],&hybEnd[1],&hybEnd[0]);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),710,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
711 for (d = 0; d < depth+1; ++d) {
712 ierr = DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),712,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
713 for (p = pStart; p < pEnd; ++p) {
714 if (hybEnd[d] >= 0 && p >= hybEnd[d]) { /* put in a separate value for hybrid points */
715 pointDepths[p] = 2 * d;
716 } else {
717 pointDepths[p] = 2 * d + 1;
718 }
719 }
720 }
721 for (p = 0; p < nleaves; ++p) remoteDepths[p] = -1;
722 ierr = PetscSFBcastBegin(sf, MPIU_INT((MPI_Datatype)0x4c000405), pointDepths, remoteDepths);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),722,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
723 ierr = PetscSFBcastEnd(sf, MPIU_INT((MPI_Datatype)0x4c000405), pointDepths, remoteDepths);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),723,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
724 /* Count received points in each stratum and compute the internal strata shift */
725 ierr = PetscMalloc3(2*(depth+1), &depthRecv, 2*(depth+1), &depthShift, 2*(depth+1), &depthIdx)PetscMallocA(3,PETSC_FALSE,725,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(2*(depth+1))*sizeof(**(&depthRecv)),(&depthRecv
),(size_t)(2*(depth+1))*sizeof(**(&depthShift)),(&depthShift
),(size_t)(2*(depth+1))*sizeof(**(&depthIdx)),(&depthIdx
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),725,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
726 for (d = 0; d < 2*(depth+1); ++d) depthRecv[d] = 0;
727 for (p = 0; p < nleaves; ++p) depthRecv[remoteDepths[p]]++;
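  /* Editorial note (added): each depth d was split above into two pseudo-strata, 2*d for
     hybrid points and 2*d+1 for regular points. The shifts below order the received leaves
     as regular cells, hybrid cells, regular vertices, hybrid vertices, then the remaining
     depths in decreasing order, with the hybrid points of each depth following the regular
     ones; depthShift[s] is the offset of pseudo-stratum s in the new leaf numbering. */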
728 depthShift[2*depth+1] = 0;
729 for (d = 0; d < 2*depth+1; ++d) depthShift[d] = depthRecv[2 * depth + 1];
730 for (d = 0; d < 2*depth; ++d) depthShift[d] += depthRecv[2 * depth];
731 depthShift[0] += depthRecv[1];
732 for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[1];
733 for (d = 2; d < 2*depth; ++d) depthShift[d] += depthRecv[0];
734 for (d = 2 * depth-1; d > 2; --d) {
735 PetscInt e;
736
737 for (e = d -1; e > 1; --e) depthShift[e] += depthRecv[d];
738 }
739 for (d = 0; d < 2*(depth+1); ++d) {depthIdx[d] = 0;}
740 /* Derive a new local permutation based on stratified indices */
741 ierr = PetscMalloc1(nleaves, &ilocal)PetscMallocA(1,PETSC_FALSE,741,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(nleaves)*sizeof(**(&ilocal)),(&ilocal))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),741,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
742 for (p = 0; p < nleaves; ++p) {
743 const PetscInt dep = remoteDepths[p];
744
745 ilocal[p] = depthShift[dep] + depthIdx[dep];
746 depthIdx[dep]++;
747 }
748 ierr = PetscSFCreate(comm, migrationSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),748,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
749 ierr = PetscObjectSetName((PetscObject) *migrationSF, "Migration SF");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),749,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
750 ierr = PetscSFSetGraph(*migrationSF, nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_COPY_VALUES);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),750,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
751 ierr = PetscFree2(pointDepths,remoteDepths)PetscFreeA(2,751,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,&(pointDepths),&(remoteDepths))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),751,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
752 ierr = PetscFree3(depthRecv, depthShift, depthIdx)PetscFreeA(3,752,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,&(depthRecv),&(depthShift),&(depthIdx))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),752,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
753 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
754}
755
756/*@
757 DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
758
759 Collective on DM
760
761 Input Parameters:
762+ dm - The DMPlex object
763. pointSF - The PetscSF describing the communication pattern
764. originalSection - The PetscSection for existing data layout
765- originalVec - The existing data
766
767 Output Parameters:
768+ newSection - The PetscSection describing the new data layout
769- newVec - The new data
770
771 Level: developer
772
773.seealso: DMPlexDistribute(), DMPlexDistributeFieldIS(), DMPlexDistributeData()
774@*/
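/* Example (editorial sketch, not part of the original source): distributing a field held
   in a Vec; newSection and newVec are created by the caller but laid out, sized, and typed
   inside this routine; error checking is elided.

     PetscSection newSection;
     Vec          newVec;

     PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);
     VecCreate(PetscObjectComm((PetscObject) dm), &newVec);
     DMPlexDistributeField(dm, pointSF, originalSection, originalVec, newSection, newVec);
*/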
775PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
776{
777 PetscSF fieldSF;
778 PetscInt *remoteOffsets, fieldSize;
779 PetscScalar *originalValues, *newValues;
780 PetscErrorCode ierr;
781
782 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 782; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
783 ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeField].active) ? (*PetscLogPLB)((DMPLEX_DistributeField
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),783,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
784 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),784,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
785
786 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),786,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
787 ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE-1);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),787,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
788 ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),788,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
789
790 ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),790,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
791 ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),791,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
792 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),792,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
793 ierr = PetscFree(remoteOffsets)((*PetscTrFree)((void*)(remoteOffsets),793,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
) || ((remoteOffsets) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),793,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
794 ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR((MPI_Datatype)0x4c00080b), originalValues, newValues);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),794,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
795 ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR((MPI_Datatype)0x4c00080b), originalValues, newValues);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),795,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
796 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),796,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
797 ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),797,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
798 ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),798,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
799 ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeField].active) ? (*PetscLogPLE)((DMPLEX_DistributeField
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),799,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
800 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
801}
802
803/*@
804 DMPlexDistributeFieldIS - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
805
806 Collective on DM
807
808 Input Parameters:
809+ dm - The DMPlex object
810. pointSF - The PetscSF describing the communication pattern
811. originalSection - The PetscSection for existing data layout
812- originalIS - The existing data
813
814 Output Parameters:
815+ newSection - The PetscSection describing the new data layout
816- newIS - The new data
817
818 Level: developer
819
820.seealso: DMPlexDistribute(), DMPlexDistributeField(), DMPlexDistributeData()
821@*/
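/* Example (editorial sketch, not part of the original source): the IS analogue of
   DMPlexDistributeField(); newIS is created inside the routine; error checking is elided.

     PetscSection newSection;
     IS           newIS;

     PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);
     DMPlexDistributeFieldIS(dm, pointSF, originalSection, originalIS, newSection, &newIS);
*/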
822PetscErrorCode DMPlexDistributeFieldIS(DM dm, PetscSF pointSF, PetscSection originalSection, IS originalIS, PetscSection newSection, IS *newIS)
823{
824 PetscSF fieldSF;
825 PetscInt *newValues, *remoteOffsets, fieldSize;
826 const PetscInt *originalValues;
827 PetscErrorCode ierr;
828
829 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 829; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
830 ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeField].active) ? (*PetscLogPLB)((DMPLEX_DistributeField
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),830,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
831 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),831,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
832
833 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),833,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
834 ierr = PetscMalloc1(fieldSize, &newValues)PetscMallocA(1,PETSC_FALSE,834,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(fieldSize)*sizeof(**(&newValues)),(&newValues
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),834,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
835
836 ierr = ISGetIndices(originalIS, &originalValues);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),836,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
837 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),837,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
838 ierr = PetscFree(remoteOffsets)((*PetscTrFree)((void*)(remoteOffsets),838,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
) || ((remoteOffsets) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),838,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
839 ierr = PetscSFBcastBegin(fieldSF, MPIU_INT((MPI_Datatype)0x4c000405), (PetscInt *) originalValues, newValues);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),839,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
840 ierr = PetscSFBcastEnd(fieldSF, MPIU_INT((MPI_Datatype)0x4c000405), (PetscInt *) originalValues, newValues);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),840,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
841 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),841,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
842 ierr = ISRestoreIndices(originalIS, &originalValues);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),842,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
843 ierr = ISCreateGeneral(PetscObjectComm((PetscObject) pointSF), fieldSize, newValues, PETSC_OWN_POINTER, newIS);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),843,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
844 ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeField].active) ? (*PetscLogPLE)((DMPLEX_DistributeField
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),844,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
845 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
846}
847
848/*@
849 DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution
850
851 Collective on DM
852
853 Input Parameters:
854+ dm - The DMPlex object
855. pointSF - The PetscSF describing the communication pattern
856. originalSection - The PetscSection for existing data layout
857. datatype - The type of data
858- originalData - The existing data
859
860 Output Parameters:
861+ newSection - The PetscSection describing the new data layout
862- newData - The new data
863
864 Level: developer
865
866.seealso: DMPlexDistribute(), DMPlexDistributeField()
867@*/
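/* Example (editorial sketch, not part of the original source): distributing raw data with
   an explicit MPI datatype; newData is allocated inside the routine with PetscMalloc() and
   must be freed by the caller; error checking is elided.

     PetscSection newSection;
     PetscInt    *newData;

     PetscSectionCreate(PetscObjectComm((PetscObject) dm), &newSection);
     DMPlexDistributeData(dm, pointSF, originalSection, MPIU_INT, (void *) originalData, newSection, (void **) &newData);
     ...
     PetscFree(newData);
*/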
868PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
869{
870 PetscSF fieldSF;
871 PetscInt *remoteOffsets, fieldSize;
872 PetscMPIInt dataSize;
873 PetscErrorCode ierr;
874
875 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 875; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
876 ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeData].active) ? (*PetscLogPLB)((DMPLEX_DistributeData
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),876,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
877 ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),877,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
878
879 ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),879,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
880 ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),880,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
881 ierr = PetscMalloc(fieldSize * dataSize, newData)((*PetscTrMalloc)((fieldSize * dataSize),881,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(void**)(newData)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),881,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
882
883 ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),883,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
884 ierr = PetscFree(remoteOffsets)((*PetscTrFree)((void*)(remoteOffsets),884,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
) || ((remoteOffsets) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),884,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
885 ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),885,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
886 ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),886,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
887 ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),887,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
888 ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeData].active) ? (*PetscLogPLE)((DMPLEX_DistributeData
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),888,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
889 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
890}
891
892static PetscErrorCode DMPlexDistributeCones(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
893{
894 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data;
895 MPI_Comm comm;
896 PetscSF coneSF;
897 PetscSection originalConeSection, newConeSection;
898 PetscInt *remoteOffsets, *cones, *globCones, *newCones, newConesSize;
899 PetscBool flg;
900 PetscErrorCode ierr;
901
902 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 902; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
903 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),903,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),903,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),903,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),903,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
904 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5)do { if (!dmParallel) return PetscError(((MPI_Comm)0x44000001
),904,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",5); if (
!PetscCheckPointer(dmParallel,PETSC_OBJECT)) return PetscError
(((MPI_Comm)0x44000001),904,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,5); if (((PetscObject)(dmParallel))->classid != DM_CLASSID
) { if (((PetscObject)(dmParallel))->classid == -1) return
PetscError(((MPI_Comm)0x44000001),904,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,5); else return PetscError(((MPI_Comm)0x44000001),904,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,5); } } while (0)
;
905
906 ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeCones].active) ? (*PetscLogPLB)((DMPLEX_DistributeCones
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),906,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
907 /* Distribute cone section */
908 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),908,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
909 ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),909,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
910 ierr = DMPlexGetConeSection(dmParallel, &newConeSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),910,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
911 ierr = PetscSFDistributeSection(migrationSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),911,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
912 ierr = DMSetUp(dmParallel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),912,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
913 {
914 PetscInt pStart, pEnd, p;
915
916 ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),916,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
917 for (p = pStart; p < pEnd; ++p) {
918 PetscInt coneSize;
919 ierr = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),919,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
920 pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize)(((pmesh->maxConeSize)<(coneSize)) ? (coneSize) : (pmesh
->maxConeSize))
;
921 }
922 }
923 /* Communicate and renumber cones */
924 ierr = PetscSFCreateSectionSF(migrationSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),924,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
925 ierr = PetscFree(remoteOffsets)((*PetscTrFree)((void*)(remoteOffsets),925,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
) || ((remoteOffsets) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),925,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
926 ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),926,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
927 if (original) {
928 PetscInt numCones;
929
930 ierr = PetscSectionGetStorageSize(originalConeSection,&numCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),930,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
931 ierr = PetscMalloc1(numCones,&globCones)PetscMallocA(1,PETSC_FALSE,931,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(numCones)*sizeof(**(&globCones)),(&globCones
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),931,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
932 ierr = ISLocalToGlobalMappingApplyBlock(original, numCones, cones, globCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),932,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
933 } else {
934 globCones = cones;
935 }
936 ierr = DMPlexGetCones(dmParallel, &newCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),936,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
937 ierr = PetscSFBcastBegin(coneSF, MPIU_INT((MPI_Datatype)0x4c000405), globCones, newCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),937,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
938 ierr = PetscSFBcastEnd(coneSF, MPIU_INT((MPI_Datatype)0x4c000405), globCones, newCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),938,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
939 if (original) {
940 ierr = PetscFree(globCones)((*PetscTrFree)((void*)(globCones),940,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
) || ((globCones) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),940,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
941 }
942 ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),942,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
943 ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL((void*)0), newCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),943,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
 944#if defined(PETSC_USE_DEBUG)
945 {
946 PetscInt p;
947 PetscBool valid = PETSC_TRUE;
948 for (p = 0; p < newConesSize; ++p) {
949 if (newCones[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF((MPI_Comm)0x44000001), "[%d] Point %D not in overlap SF\n", PetscGlobalRank,p);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),949,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
950 }
951 if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map")return PetscError(((MPI_Comm)0x44000001),951,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,63,PETSC_ERROR_INITIAL,"Invalid global to local map")
;
952 }
953#endif
954 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),954,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
955 if (flg) {
956 ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),956,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
957 ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLDPETSC_VIEWER_STDOUT_(PETSC_COMM_WORLD));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),957,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
958 ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),958,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
959 ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLDPETSC_VIEWER_STDOUT_(PETSC_COMM_WORLD));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),959,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
960 ierr = PetscSFView(coneSF, NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),960,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
961 }
962 ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),962,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
963 ierr = DMPlexGetConeOrientations(dmParallel, &newCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),963,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
964 ierr = PetscSFBcastBegin(coneSF, MPIU_INT((MPI_Datatype)0x4c000405), cones, newCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),964,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
965 ierr = PetscSFBcastEnd(coneSF, MPIU_INT((MPI_Datatype)0x4c000405), cones, newCones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),965,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
966 ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),966,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
967 ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeCones].active) ? (*PetscLogPLE)((DMPLEX_DistributeCones
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),967,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
968 /* Create supports and stratify DMPlex */
969 {
970 PetscInt pStart, pEnd;
971
972 ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),972,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
973 ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),973,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
974 }
975 ierr = DMPlexSymmetrize(dmParallel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),975,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
976 ierr = DMPlexStratify(dmParallel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),976,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
977 {
978 PetscBool useCone, useClosure, useAnchors;
979
980 ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),980,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
981 ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),981,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
982 ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),982,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
983 ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),983,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
984 }
985 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
986}
987
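DMPlexDistributeCones() above follows the standard Plex data-migration pattern: distribute the cone PetscSection over the migration star forest, build a section SF addressing the individual cone entries, broadcast the globally numbered cone points, and map them into the new local numbering. For readability, here is that core pattern with the expanded error-checking macros collapsed back to their usual one-line form; this is a condensed sketch, not the verbatim file, and the names MigrateSectionData, secOld, secNew, valsOld, valsNew, and l2g are illustrative.

#include <petsc.h>

/* Sketch of the section-data migration pattern used by DMPlexDistributeCones():
   move integer values laid out by secOld from the roots of migrationSF to the
   leaves (laid out by secNew), then renumber them with the new local map. */
static PetscErrorCode MigrateSectionData(PetscSF migrationSF, PetscSection secOld, const PetscInt *valsOld,
                                         PetscSection secNew, PetscInt *valsNew, ISLocalToGlobalMapping l2g)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSFDistributeSection(migrationSF, secOld, &remoteOffsets, secNew);CHKERRQ(ierr); /* per-point sizes */
  ierr = PetscSFCreateSectionSF(migrationSF, secOld, remoteOffsets, secNew, &fieldSF);CHKERRQ(ierr);
  ierr = PetscFree(remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_INT, valsOld, valsNew);CHKERRQ(ierr);                /* values */
  ierr = PetscSFBcastEnd(fieldSF, MPIU_INT, valsOld, valsNew);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(secNew, &n);CHKERRQ(ierr);
  /* Received values are global point numbers; mask them back to local numbering */
  ierr = ISGlobalToLocalMappingApplyBlock(l2g, IS_GTOLM_MASK, n, valsNew, NULL, valsNew);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

The cone orientations are pushed through the same coneSF without the final renumbering step, since orientations are not point numbers.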
988static PetscErrorCode DMPlexDistributeCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
989{
990 MPI_Comm comm;
991 PetscSection originalCoordSection, newCoordSection;
992 Vec originalCoordinates, newCoordinates;
993 PetscInt bs;
994 PetscBool isper;
995 const char *name;
996 const PetscReal *maxCell, *L;
997 const DMBoundaryType *bd;
998 PetscErrorCode ierr;
999
1000 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1000; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1001 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1001,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1001,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1001,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1001,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1002 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3)do { if (!dmParallel) return PetscError(((MPI_Comm)0x44000001
),1002,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(dmParallel,PETSC_OBJECT)) return PetscError
(((MPI_Comm)0x44000001),1002,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(dmParallel))->classid != DM_CLASSID
) { if (((PetscObject)(dmParallel))->classid == -1) return
PetscError(((MPI_Comm)0x44000001),1002,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),1002,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,3); } } while (0)
;
1003
1004 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1004,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1005 ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1005,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1006 ierr = DMGetCoordinateSection(dmParallel, &newCoordSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1006,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1007 ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1007,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1008 if (originalCoordinates) {
1009 ierr = VecCreate(PETSC_COMM_SELF((MPI_Comm)0x44000001), &newCoordinates);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1009,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1010 ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1010,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1011 ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1011,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1012
1013 ierr = DMPlexDistributeField(dm, migrationSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1013,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1014 ierr = DMSetCoordinatesLocal(dmParallel, newCoordinates);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1014,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1015 ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1015,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1016 ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1016,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1017 ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1017,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1018 }
1019 ierr = DMGetPeriodicity(dm, &isper, &maxCell, &L, &bd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1019,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1020 ierr = DMSetPeriodicity(dmParallel, isper, maxCell, L, bd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1020,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1021 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1022}
1023
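DMPlexDistributeCoordinates() above reuses the migration SF to move the local coordinate Vec: the coordinate PetscSection and the Vec are pushed together by DMPlexDistributeField(), after which the block size is copied and the periodicity settings are transferred. A condensed sketch of those steps follows, with the macros in their usual short form; the routine name MigrateCoordinates is illustrative, and the vector-name and periodicity copies are elided.

#include <petsc.h>

/* Condensed sketch of the coordinate migration above; dm, migrationSF, and
   dmParallel are assumed to come from a prior DMPlexDistribute() setup. */
static PetscErrorCode MigrateCoordinates(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscSection   oldSec, newSec;
  Vec            oldCoords, newCoords;
  PetscInt       bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetCoordinateSection(dm, &oldSec);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmParallel, &newSec);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &oldCoords);CHKERRQ(ierr);
  if (oldCoords) {
    ierr = VecCreate(PETSC_COMM_SELF, &newCoords);CHKERRQ(ierr);
    /* Moves both the section layout and the coordinate values over migrationSF */
    ierr = DMPlexDistributeField(dm, migrationSF, oldSec, oldCoords, newSec, newCoords);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dmParallel, newCoords);CHKERRQ(ierr);
    ierr = VecGetBlockSize(oldCoords, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoords, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoords);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}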
1024/* Here we are assuming that process 0 always has everything */
1025static PetscErrorCode DMPlexDistributeLabels(DM dm, PetscSF migrationSF, DM dmParallel)
1026{
1027 DM_Plex *mesh = (DM_Plex*) dm->data;
1028 MPI_Comm comm;
1029 DMLabel depthLabel;
1030 PetscMPIInt rank;
1031 PetscInt depth, d, numLabels, numLocalLabels, l;
1032 PetscBool hasLabels = PETSC_FALSE, lsendDepth, sendDepth;
1033 PetscObjectState depthState = -1;
1034 PetscErrorCode ierr;
1035
1036 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1036; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1037 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1037,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1037,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1037,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1037,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1038 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3)do { if (!dmParallel) return PetscError(((MPI_Comm)0x44000001
),1038,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(dmParallel,PETSC_OBJECT)) return PetscError
(((MPI_Comm)0x44000001),1038,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(dmParallel))->classid != DM_CLASSID
) { if (((PetscObject)(dmParallel))->classid == -1) return
PetscError(((MPI_Comm)0x44000001),1038,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),1038,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,3); } } while (0)
;
1039
1040 ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeLabels].active) ? (*PetscLogPLB)((DMPLEX_DistributeLabels
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1040,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1041 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1041,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1042 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1042,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1043
1044 /* If the user has changed the depth label, communicate it instead */
1045 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1045,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1046 ierr = DMPlexGetDepthLabel(dm, &depthLabel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1046,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1047 if (depthLabel) {ierr = PetscObjectStateGet((PetscObject) depthLabel, &depthState);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1047,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1048 lsendDepth = mesh->depthState != depthState ? PETSC_TRUE : PETSC_FALSE;
1049 ierr = MPIU_Allreduce(&lsendDepth, &sendDepth, 1, MPIU_BOOL, MPI_LOR, comm)(PetscAllreduceBarrierCheck(comm,1,1049,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((comm)),0) ||
MPI_Allreduce((&lsendDepth),(&sendDepth),(1),(MPIU_BOOL
),((MPI_Op)(0x58000007)),(comm))))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1049,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1050 if (sendDepth) {
1051 ierr = DMRemoveLabel(dmParallel, "depth", &depthLabel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1051,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1052 ierr = DMLabelDestroy(&depthLabel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1052,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1053 }
1054 /* Everyone must have either the same number of labels, or none */
1055 ierr = DMGetNumLabels(dm, &numLocalLabels);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1055,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1056 numLabels = numLocalLabels;
1057 ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm)((petsc_allreduce_ct += PetscMPIParallelComm((comm)),0) || MPI_Bcast
((&numLabels),(1),(((MPI_Datatype)0x4c000405)),(0),(comm)
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1057,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1058 if (numLabels == numLocalLabels) hasLabels = PETSC_TRUE;
1059 for (l = numLabels-1; l >= 0; --l) {
1060 DMLabel label = NULL((void*)0), labelNew = NULL((void*)0);
1061 PetscBool isDepth, lisOutput = PETSC_TRUE, isOutput;
1062 const char *name = NULL((void*)0);
1063
1064 if (hasLabels) {
1065 ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1065,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1066 /* Skip "depth" because it is recreated */
1067 ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1067,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1068 ierr = PetscStrcmp(name, "depth", &isDepth);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1068,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1069 }
1070 ierr = MPI_Bcast(&isDepth, 1, MPIU_BOOL, 0, comm)((petsc_allreduce_ct += PetscMPIParallelComm((comm)),0) || MPI_Bcast
((&isDepth),(1),(MPIU_BOOL),(0),(comm)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1070,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1071 if (isDepth && !sendDepth) continue;
1072 ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1072,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1073 if (isDepth) {
1074 /* Put in any missing strata which can occur if users are managing the depth label themselves */
1075 PetscInt gdepth;
1076
1077 ierr = MPIU_Allreduce(&depth, &gdepth, 1, MPIU_INT, MPI_MAX, comm)(PetscAllreduceBarrierCheck(comm,1,1077,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((comm)),0) ||
MPI_Allreduce((&depth),(&gdepth),(1),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000001)),(comm))))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1077,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1078 if ((depth >= 0) && (gdepth != depth)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Inconsistent Plex depth %d != %d", depth, gdepth)return PetscError(((MPI_Comm)0x44000001),1078,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Inconsistent Plex depth %d != %d",depth
,gdepth)
;
1079 for (d = 0; d <= gdepth; ++d) {
1080 PetscBool has;
1081
1082 ierr = DMLabelHasStratum(labelNew, d, &has);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1082,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1083 if (!has) {ierr = DMLabelAddStratum(labelNew, d);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1083,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1084 }
1085 }
1086 ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1086,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1087 /* Put the output flag in the new label */
1088 if (hasLabels) {ierr = DMGetLabelOutput(dm, name, &lisOutput);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1088,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1089 ierr = MPIU_Allreduce(&lisOutput, &isOutput, 1, MPIU_BOOL, MPI_LAND, comm)(PetscAllreduceBarrierCheck(comm,1,1089,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((comm)),0) ||
MPI_Allreduce((&lisOutput),(&isOutput),(1),(MPIU_BOOL
),((MPI_Op)(0x58000005)),(comm))))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1089,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1090 ierr = PetscObjectGetName((PetscObject) labelNew, &name);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1090,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1091 ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1091,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1092 }
1093 ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeLabels].active) ? (*PetscLogPLE)((DMPLEX_DistributeLabels
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1093,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1094 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1095}
1096
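DMPlexDistributeLabels() above walks the labels in reverse order and pushes each one through DMLabelDistribute(), with special handling for the depth label and collective agreement on the per-label output flag. Stripped of that collective bookkeeping and of the depth special case, the per-label step reads as follows; this is a condensed sketch with an illustrative routine name, not the verbatim file.

#include <petsc.h>

/* Condensed sketch of the per-label migration above; the depth-label handling,
   the numLabels broadcast, and the output-flag reduction are omitted. */
static PetscErrorCode MigrateLabels(DM dm, PetscSF migrationSF, DM dmParallel)
{
  PetscInt       l, numLabels;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetNumLabels(dm, &numLabels);CHKERRQ(ierr);
  for (l = numLabels-1; l >= 0; --l) {
    DMLabel     label, labelNew;
    PetscBool   isOutput;
    const char *name;

    ierr = DMGetLabelByNum(dm, l, &label);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) label, &name);CHKERRQ(ierr);
    ierr = DMLabelDistribute(label, migrationSF, &labelNew);CHKERRQ(ierr); /* push strata over the SF */
    ierr = DMAddLabel(dmParallel, labelNew);CHKERRQ(ierr);
    ierr = DMGetLabelOutput(dm, name, &isOutput);CHKERRQ(ierr);            /* carry the output flag */
    ierr = DMSetLabelOutput(dmParallel, name, isOutput);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}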
1097static PetscErrorCode DMPlexDistributeSetupHybrid(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping renumbering, DM dmParallel)
1098{
1099 DM_Plex *mesh = (DM_Plex*) dm->data;
1100 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data;
1101 PetscBool *isHybrid, *isHybridParallel;
1102 PetscInt dim, depth, d;
1103 PetscInt pStart, pEnd, pStartP, pEndP;
1104 PetscErrorCode ierr;
1105
1106 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1106; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1107 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1107,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1107,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1107,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1107,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1108 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3)do { if (!dmParallel) return PetscError(((MPI_Comm)0x44000001
),1108,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(dmParallel,PETSC_OBJECT)) return PetscError
(((MPI_Comm)0x44000001),1108,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(dmParallel))->classid != DM_CLASSID
) { if (((PetscObject)(dmParallel))->classid == -1) return
PetscError(((MPI_Comm)0x44000001),1108,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),1108,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,3); } } while (0)
;
1109
1110 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1110,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1111 ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1111,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1112 ierr = DMPlexGetChart(dm,&pStart,&pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1112,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1113 ierr = DMPlexGetChart(dmParallel,&pStartP,&pEndP);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1113,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1114 ierr = PetscCalloc2(pEnd-pStart,&isHybrid,pEndP-pStartP,&isHybridParallel)PetscMallocA(2,PETSC_TRUE,1114,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(pEnd-pStart)*sizeof(**(&isHybrid)),(&isHybrid
),(size_t)(pEndP-pStartP)*sizeof(**(&isHybridParallel)),(
&isHybridParallel))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1114,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1115 for (d = 0; d <= depth; d++) {
1116 PetscInt hybridMax = (depth == 1 && d == 1) ? mesh->hybridPointMax[dim] : mesh->hybridPointMax[d];
1117
1118 if (hybridMax >= 0) {
1119 PetscInt sStart, sEnd, p;
1120
1121 ierr = DMPlexGetDepthStratum(dm,d,&sStart,&sEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1121,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1122 for (p = hybridMax; p < sEnd; p++) isHybrid[p-pStart] = PETSC_TRUE;
1123 }
1124 }
1125 ierr = PetscSFBcastBegin(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1125,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1126 ierr = PetscSFBcastEnd(migrationSF,MPIU_BOOL,isHybrid,isHybridParallel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1126,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1127 for (d = 0; d <= dim; d++) pmesh->hybridPointMax[d] = -1;
1128 for (d = 0; d <= depth; d++) {
1129 PetscInt sStart, sEnd, p, dd;
1130
1131 ierr = DMPlexGetDepthStratum(dmParallel,d,&sStart,&sEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1131,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1132 dd = (depth == 1 && d == 1) ? dim : d;
1133 for (p = sStart; p < sEnd; p++) {
1134 if (isHybridParallel[p-pStartP]) {
1135 pmesh->hybridPointMax[dd] = p;
1136 break;
1137 }
1138 }
1139 }
1140 ierr = PetscFree2(isHybrid,isHybridParallel)PetscFreeA(2,1140,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,&(isHybrid),&(isHybridParallel))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1140,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1141 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1142}
1143
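DMPlexDistributeSetupHybrid() above marks hybrid points on the serial mesh, broadcasts the per-point flags over the migration SF with MPIU_BOOL, and then records, for each depth stratum of the parallel mesh, the first flagged point as the new hybridPointMax. The transfer itself is just an SF broadcast of a PetscBool array; a minimal sketch of that step is below, where the helper name BcastPointFlags is illustrative and the arrays are assumed to be sized to the root and leaf spaces of the SF.

#include <petsc.h>

/* Minimal sketch of the flag broadcast used above: push per-point PetscBool
   flags from the serial (root) side of the migration SF to the redistributed
   (leaf) side. */
static PetscErrorCode BcastPointFlags(PetscSF migrationSF, const PetscBool *rootFlags, PetscBool *leafFlags)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSFBcastBegin(migrationSF, MPIU_BOOL, rootFlags, leafFlags);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(migrationSF, MPIU_BOOL, rootFlags, leafFlags);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}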
1144static PetscErrorCode DMPlexDistributeSetupTree(DM dm, PetscSF migrationSF, ISLocalToGlobalMapping original, ISLocalToGlobalMapping renumbering, DM dmParallel)
1145{
1146 DM_Plex *mesh = (DM_Plex*) dm->data;
1147 DM_Plex *pmesh = (DM_Plex*) (dmParallel)->data;
1148 MPI_Comm comm;
1149 DM refTree;
1150 PetscSection origParentSection, newParentSection;
1151 PetscInt *origParents, *origChildIDs;
1152 PetscBool flg;
1153 PetscErrorCode ierr;
1154
1155 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1155; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1156 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1156,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1156,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1156,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1156,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1157 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 5)do { if (!dmParallel) return PetscError(((MPI_Comm)0x44000001
),1157,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",5); if (
!PetscCheckPointer(dmParallel,PETSC_OBJECT)) return PetscError
(((MPI_Comm)0x44000001),1157,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,5); if (((PetscObject)(dmParallel))->classid != DM_CLASSID
) { if (((PetscObject)(dmParallel))->classid == -1) return
PetscError(((MPI_Comm)0x44000001),1157,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,5); else return PetscError(((MPI_Comm)0x44000001),1157,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,5); } } while (0)
;
1158 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1158,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1159
1160 /* Set up tree */
1161 ierr = DMPlexGetReferenceTree(dm,&refTree);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1161,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1162 ierr = DMPlexSetReferenceTree(dmParallel,refTree);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1162,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1163 ierr = DMPlexGetTree(dm,&origParentSection,&origParents,&origChildIDs,NULL((void*)0),NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1163,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1164 if (origParentSection) {
1165 PetscInt pStart, pEnd;
1166 PetscInt *newParents, *newChildIDs, *globParents;
1167 PetscInt *remoteOffsetsParents, newParentSize;
1168 PetscSF parentSF;
1169
1170 ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1170,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1171 ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dmParallel),&newParentSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1171,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1172 ierr = PetscSectionSetChart(newParentSection,pStart,pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1172,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1173 ierr = PetscSFDistributeSection(migrationSF, origParentSection, &remoteOffsetsParents, newParentSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1173,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1174 ierr = PetscSFCreateSectionSF(migrationSF, origParentSection, remoteOffsetsParents, newParentSection, &parentSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1174,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1175 ierr = PetscFree(remoteOffsetsParents)((*PetscTrFree)((void*)(remoteOffsetsParents),1175,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
) || ((remoteOffsetsParents) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1175,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1176 ierr = PetscSectionGetStorageSize(newParentSection,&newParentSize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1176,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1177 ierr = PetscMalloc2(newParentSize,&newParents,newParentSize,&newChildIDs)PetscMallocA(2,PETSC_FALSE,1177,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(newParentSize)*sizeof(**(&newParents)),(&newParents
),(size_t)(newParentSize)*sizeof(**(&newChildIDs)),(&
newChildIDs))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1177,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1178 if (original) {
1179 PetscInt numParents;
1180
1181 ierr = PetscSectionGetStorageSize(origParentSection,&numParents);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1181,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1182 ierr = PetscMalloc1(numParents,&globParents)PetscMallocA(1,PETSC_FALSE,1182,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(numParents)*sizeof(**(&globParents)),(&globParents
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1182,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1183 ierr = ISLocalToGlobalMappingApplyBlock(original, numParents, origParents, globParents);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1183,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1184 }
1185 else {
1186 globParents = origParents;
1187 }
1188 ierr = PetscSFBcastBegin(parentSF, MPIU_INT((MPI_Datatype)0x4c000405), globParents, newParents);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1188,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1189 ierr = PetscSFBcastEnd(parentSF, MPIU_INT((MPI_Datatype)0x4c000405), globParents, newParents);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1189,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1190 if (original) {
1191 ierr = PetscFree(globParents)((*PetscTrFree)((void*)(globParents),1191,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
) || ((globParents) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1191,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1192 }
1193 ierr = PetscSFBcastBegin(parentSF, MPIU_INT((MPI_Datatype)0x4c000405), origChildIDs, newChildIDs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1193,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1194 ierr = PetscSFBcastEnd(parentSF, MPIU_INT((MPI_Datatype)0x4c000405), origChildIDs, newChildIDs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1194,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1195 ierr = ISGlobalToLocalMappingApplyBlock(renumbering,IS_GTOLM_MASK, newParentSize, newParents, NULL((void*)0), newParents);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1195,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1196#if defined(PETSC_USE_DEBUG)
1197 {
1198 PetscInt p;
1199 PetscBool valid = PETSC_TRUE;
1200 for (p = 0; p < newParentSize; ++p) {
1201 if (newParents[p] < 0) {valid = PETSC_FALSE; ierr = PetscPrintf(PETSC_COMM_SELF((MPI_Comm)0x44000001), "Point %D not in overlap SF\n", p);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1201,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1202 }
1203 if (!valid) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Invalid global to local map")return PetscError(((MPI_Comm)0x44000001),1203,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,63,PETSC_ERROR_INITIAL,"Invalid global to local map")
;
1204 }
1205#endif
1206 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-parents_view", &flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1206,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1207 if (flg) {
1208 ierr = PetscPrintf(comm, "Serial Parent Section: \n");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1208,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1209 ierr = PetscSectionView(origParentSection, PETSC_VIEWER_STDOUT_WORLDPETSC_VIEWER_STDOUT_(PETSC_COMM_WORLD));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1209,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1210 ierr = PetscPrintf(comm, "Parallel Parent Section: \n");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1210,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1211 ierr = PetscSectionView(newParentSection, PETSC_VIEWER_STDOUT_WORLDPETSC_VIEWER_STDOUT_(PETSC_COMM_WORLD));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1211,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1212 ierr = PetscSFView(parentSF, NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1212,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1213 }
1214 ierr = DMPlexSetTree(dmParallel,newParentSection,newParents,newChildIDs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1214,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1215 ierr = PetscSectionDestroy(&newParentSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1215,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1216 ierr = PetscFree2(newParents,newChildIDs)PetscFreeA(2,1216,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,&(newParents),&(newChildIDs))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1216,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1217 ierr = PetscSFDestroy(&parentSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1217,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1218 }
1219 pmesh->useAnchors = mesh->useAnchors;
1220 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1221}
1222
1223PETSC_UNUSED__attribute((unused)) static PetscErrorCode DMPlexDistributeSF(DM dm, PetscSF migrationSF, DM dmParallel)
1224{
1225 PetscMPIInt rank, size;
1226 MPI_Comm comm;
1227 PetscErrorCode ierr;
1228
1229 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1229; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1230 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1230,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1230,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1230,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1230,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1231 PetscValidHeaderSpecific(dmParallel, DM_CLASSID, 3)do { if (!dmParallel) return PetscError(((MPI_Comm)0x44000001
),1231,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(dmParallel,PETSC_OBJECT)) return PetscError
(((MPI_Comm)0x44000001),1231,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(dmParallel))->classid != DM_CLASSID
) { if (((PetscObject)(dmParallel))->classid == -1) return
PetscError(((MPI_Comm)0x44000001),1231,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),1231,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,3); } } while (0)
;
1232
1233 /* Create point SF for parallel mesh */
1234 ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeSF].active) ? (*PetscLogPLB)((DMPLEX_DistributeSF
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1234,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1235 ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1235,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1236 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1236,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1237 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1237,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1238 {
1239 const PetscInt *leaves;
1240 PetscSFNode *remotePoints, *rowners, *lowners;
1241 PetscInt numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
1242 PetscInt pStart, pEnd;
1243
1244 ierr = DMPlexGetChart(dmParallel, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1244,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1245 ierr = PetscSFGetGraph(migrationSF, &numRoots, &numLeaves, &leaves, NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1245,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1246 ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners)PetscMallocA(2,PETSC_FALSE,1246,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(numRoots)*sizeof(**(&rowners)),(&rowners),(
size_t)(numLeaves)*sizeof(**(&lowners)),(&lowners))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1246,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1247 for (p=0; p<numRoots; p++) {
1248 rowners[p].rank = -1;
1249 rowners[p].index = -1;
1250 }
1251 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), rowners, lowners);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1251,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1252 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), rowners, lowners);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1252,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1253 for (p = 0; p < numLeaves; ++p) {
1254 if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
1255 lowners[p].rank = rank;
1256 lowners[p].index = leaves ? leaves[p] : p;
1257 } else if (lowners[p].rank >= 0) { /* Point already claimed, so flag it so that MAXLOC does not listen to us */
1258 lowners[p].rank = -2;
1259 lowners[p].index = -2;
1260 }
1261 }
1262 for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use it */
1263 rowners[p].rank = -3;
1264 rowners[p].index = -3;
1265 }
1266 ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), lowners, rowners, MPI_MAXLOC(MPI_Op)(0x5800000c));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1266,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1267 ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), lowners, rowners, MPI_MAXLOC(MPI_Op)(0x5800000c));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1267,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1268 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), rowners, lowners);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1268,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1269 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), rowners, lowners);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1269,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1270 for (p = 0; p < numLeaves; ++p) {
1271 if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed")return PetscError(((MPI_Comm)0x44000001),1271,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,77,PETSC_ERROR_INITIAL,"Cell partition corrupt: point not claimed"
)
;
1272 if (lowners[p].rank != rank) ++numGhostPoints;
1273 }
1274 ierr = PetscMalloc1(numGhostPoints, &ghostPoints)PetscMallocA(1,PETSC_FALSE,1274,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(numGhostPoints)*sizeof(**(&ghostPoints)),(&
ghostPoints))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1274,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1275 ierr = PetscMalloc1(numGhostPoints, &remotePoints)PetscMallocA(1,PETSC_FALSE,1275,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(numGhostPoints)*sizeof(**(&remotePoints)),(&
remotePoints))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1275,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1276 for (p = 0, gp = 0; p < numLeaves; ++p) {
1277 if (lowners[p].rank != rank) {
1278 ghostPoints[gp] = leaves ? leaves[p] : p;
1279 remotePoints[gp].rank = lowners[p].rank;
1280 remotePoints[gp].index = lowners[p].index;
1281 ++gp;
1282 }
1283 }
1284 ierr = PetscFree2(rowners,lowners)PetscFreeA(2,1284,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,&(rowners),&(lowners))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1284,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1285 ierr = PetscSFSetGraph((dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1285,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1286 ierr = PetscSFSetFromOptions((dmParallel)->sf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1286,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1287 }
1288 {
1289 PetscBool useCone, useClosure, useAnchors;
1290
1291 ierr = DMGetBasicAdjacency(dm, &useCone, &useClosure);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1291,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1292 ierr = DMSetBasicAdjacency(dmParallel, useCone, useClosure);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1292,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1293 ierr = DMPlexGetAdjacencyUseAnchors(dm, &useAnchors);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1293,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1294 ierr = DMPlexSetAdjacencyUseAnchors(dmParallel, useAnchors);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1294,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1295 }
1296 ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeSF].active) ? (*PetscLogPLE)((DMPLEX_DistributeSF
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1296,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1297 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1298}
1299
1300/*@
1301 DMPlexSetPartitionBalance - Should distribution of the DM attempt to balance the shared point partition?
1302
1303 Input Parameters:
1304+ dm - The DMPlex object
1305- flg - Balance the partition?
1306
1307 Level: intermediate
1308
1309.seealso: DMPlexDistribute(), DMPlexGetPartitionBalance()
1310@*/
1311PetscErrorCode DMPlexSetPartitionBalance(DM dm, PetscBool flg)
1312{
1313 DM_Plex *mesh = (DM_Plex *)dm->data;
1314
1315 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1315; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1316 mesh->partitionBalance = flg;
1317 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1318}
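A minimal usage sketch (not part of plexdistribute.c; the names dm, dmDist, and sf are illustrative): partition balancing would typically be switched on right before distributing a serial mesh.

    DM      dmDist = NULL;
    PetscSF sf     = NULL;

    ierr = DMPlexSetPartitionBalance(dm, PETSC_TRUE);CHKERRQ(ierr);
    ierr = DMPlexDistribute(dm, 0, &sf, &dmDist);CHKERRQ(ierr);
    if (dmDist) {                       /* NULL when the mesh was not distributed */
      ierr = DMDestroy(&dm);CHKERRQ(ierr);
      dm   = dmDist;
    }
    ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);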
1319
1320/*@
1321 DMPlexGetPartitionBalance - Does distribution of the DM attempt to balance the shared point partition?
1322
1323 Input Parameter:
1324. dm - The DMPlex object
1325
1326 Output Parameter:
1327. flg - Balance the partition?
1328
1329 Level: intermediate
1330
1331.seealso: DMPlexDistribute(), DMPlexSetPartitionBalance()
1332@*/
1333PetscErrorCode DMPlexGetPartitionBalance(DM dm, PetscBool *flg)
1334{
1335 DM_Plex *mesh = (DM_Plex *)dm->data;
1336
1337 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1337; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1338 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1338,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1338,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1338,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1338,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1339 PetscValidIntPointer(flg, 2)do { if (!flg) return PetscError(((MPI_Comm)0x44000001),1339,
__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",2); if
(!PetscCheckPointer(flg,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),1339,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,2); } while (0)
;
1340 *flg = mesh->partitionBalance;
1341 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1342}
1343
1344/*@C
1345 DMPlexCreatePointSF - Build a point SF from an SF describing a point migration
1346
1347 Input Parameters:
1348+ dm - The source DMPlex object
1349. migrationSF - The star forest that describes the parallel point remapping
1350- ownership - Flag causing a vote to determine point ownership
1351
1352 Output Parameter:
1353. pointSF - The star forest describing the point overlap in the remapped DM
1354
1355 Level: developer
1356
1357.seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1358@*/
1359PetscErrorCode DMPlexCreatePointSF(DM dm, PetscSF migrationSF, PetscBool ownership, PetscSF *pointSF)
1360{
1361 PetscMPIInt rank, size;
1362 PetscInt p, nroots, nleaves, idx, npointLeaves;
1363 PetscInt *pointLocal;
1364 const PetscInt *leaves;
1365 const PetscSFNode *roots;
1366 PetscSFNode *rootNodes, *leafNodes, *pointRemote;
1367 Vec shifts;
1368 const PetscInt numShifts = 13759;
1369 const PetscScalar *shift = NULL((void*)0);
1370 const PetscBool shiftDebug = PETSC_FALSE;
1371 PetscBool balance;
1372 PetscErrorCode ierr;
1373
1374 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1374; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1375 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1375,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1375,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1375,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1375,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1376 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1376,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1377 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1377,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1378
1379 ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1379,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1380 ierr = PetscSFGetGraph(migrationSF, &nroots, &nleaves, &leaves, &roots);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1380,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1381 ierr = PetscMalloc2(nroots, &rootNodes, nleaves, &leafNodes)PetscMallocA(2,PETSC_FALSE,1381,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(nroots)*sizeof(**(&rootNodes)),(&rootNodes)
,(size_t)(nleaves)*sizeof(**(&leafNodes)),(&leafNodes
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1381,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1382 if (ownership) {
1383 /* If balancing, we compute a random cyclic shift of the rank for each remote point. That way, the max will evenly distribute among ranks. */
1384 if (balance) {
1385 PetscRandom r;
1386
1387 ierr = PetscRandomCreate(PETSC_COMM_SELF((MPI_Comm)0x44000001), &r);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1387,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1388 ierr = PetscRandomSetInterval(r, 0, 2467*size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1388,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1389 ierr = VecCreate(PETSC_COMM_SELF((MPI_Comm)0x44000001), &shifts);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1389,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1390 ierr = VecSetSizes(shifts, numShifts, numShifts);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1390,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1391 ierr = VecSetType(shifts, VECSTANDARD"standard");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1391,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1392 ierr = VecSetRandom(shifts, r);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1392,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1393 ierr = PetscRandomDestroy(&r);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1393,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1394 ierr = VecGetArrayRead(shifts, &shift);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1394,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1395 }
1396
1397 /* Point ownership vote: Process with highest rank owns shared points */
1398 for (p = 0; p < nleaves; ++p) {
1399 if (shiftDebug) {
1400 ierr = PetscSynchronizedPrintf(PetscObjectComm((PetscObject) dm), "[%d] Point %D RemotePoint %D Shift %D MyRank %D\n", rank, leaves ? leaves[p] : p, roots[p].index, (PetscInt) PetscRealPart(shift[roots[p].index%numShifts])(shift[roots[p].index%numShifts]), (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts])(shift[roots[p].index%numShifts]) : 0))%size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1400,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1401 }
1402 /* Either put in a bid or we know we own it */
1403 leafNodes[p].rank = (rank + (shift ? (PetscInt) PetscRealPart(shift[roots[p].index%numShifts])(shift[roots[p].index%numShifts]) : 0))%size;
1404 leafNodes[p].index = p;
1405 }
1406 for (p = 0; p < nroots; p++) {
1407 /* Root must not participate in the reduction, flag so that MAXLOC does not use */
1408 rootNodes[p].rank = -3;
1409 rootNodes[p].index = -3;
1410 }
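    /* Editorial note: with MPIU_2INT under MPI_MAXLOC, the first integer of each pair is the
       value being maximized and the second is carried along as its "location"; the reduction
       below therefore picks, for every root point, the leaf with the largest (possibly shifted)
       rank and keeps that leaf's index. */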
1411 ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), leafNodes, rootNodes, MPI_MAXLOC(MPI_Op)(0x5800000c));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1411,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1412 ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), leafNodes, rootNodes, MPI_MAXLOC(MPI_Op)(0x5800000c));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1412,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1413 if (balance) {
1414 /* We've voted, now we need to get the rank. When we're balancing the partition, the "rank" in rootNodes is not
1415 * the rank but rather (rank + random)%size. So we do another reduction, voting the same way, but sending the
1416 * rank instead of the index. */
1417 PetscSFNode *rootRanks = NULL((void*)0);
1418 ierr = PetscMalloc1(nroots, &rootRanks)PetscMallocA(1,PETSC_FALSE,1418,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(nroots)*sizeof(**(&rootRanks)),(&rootRanks)
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1418,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1419 for (p = 0; p < nroots; p++) {
1420 rootRanks[p].rank = -3;
1421 rootRanks[p].index = -3;
1422 }
1423 for (p = 0; p < nleaves; p++) leafNodes[p].index = rank;
1424 ierr = PetscSFReduceBegin(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), leafNodes, rootRanks, MPI_MAXLOC(MPI_Op)(0x5800000c));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1424,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1425 ierr = PetscSFReduceEnd(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), leafNodes, rootRanks, MPI_MAXLOC(MPI_Op)(0x5800000c));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1425,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1426 for (p = 0; p < nroots; p++) rootNodes[p].rank = rootRanks[p].index;
1427 ierr = PetscFree(rootRanks)((*PetscTrFree)((void*)(rootRanks),1427,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
) || ((rootRanks) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1427,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1428 }
1429 } else {
1430 for (p = 0; p < nroots; p++) {
1431 rootNodes[p].index = -1;
1432 rootNodes[p].rank = rank;
1433 };
1434 for (p = 0; p < nleaves; p++) {
1435 /* Write new local id into old location */
1436 if (roots[p].rank == rank) {
1437 rootNodes[roots[p].index].index = leaves ? leaves[p] : p;
1438 }
1439 }
1440 }
1441 ierr = PetscSFBcastBegin(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), rootNodes, leafNodes);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1441,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1442 ierr = PetscSFBcastEnd(migrationSF, MPIU_2INT((MPI_Datatype)0x4c000816), rootNodes, leafNodes);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1442,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1443
1444 for (npointLeaves = 0, p = 0; p < nleaves; p++) {
1445 if (leafNodes[p].rank != rank) npointLeaves++;
1446 }
1447 ierr = PetscMalloc1(npointLeaves, &pointLocal)PetscMallocA(1,PETSC_FALSE,1447,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(npointLeaves)*sizeof(**(&pointLocal)),(&pointLocal
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1447,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1448 ierr = PetscMalloc1(npointLeaves, &pointRemote)PetscMallocA(1,PETSC_FALSE,1448,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(npointLeaves)*sizeof(**(&pointRemote)),(&pointRemote
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1448,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1449 for (idx = 0, p = 0; p < nleaves; p++) {
1450 if (leafNodes[p].rank != rank) {
1451 pointLocal[idx] = p;
1452 pointRemote[idx] = leafNodes[p];
1453 idx++;
1454 }
1455 }
1456 if (shift) {
1457 ierr = VecRestoreArrayRead(shifts, &shift);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1457,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1458 ierr = VecDestroy(&shifts);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1458,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1459 }
1460 if (shiftDebug) {ierr = PetscSynchronizedFlush(PetscObjectComm((PetscObject) dm), PETSC_STDOUT);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1460,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1461 ierr = PetscSFCreate(PetscObjectComm((PetscObject) dm), pointSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1461,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1462 ierr = PetscSFSetFromOptions(*pointSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1462,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1463 ierr = PetscSFSetGraph(*pointSF, nleaves, npointLeaves, pointLocal, PETSC_OWN_POINTER, pointRemote, PETSC_OWN_POINTER);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1463,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1464 ierr = PetscFree2(rootNodes, leafNodes)PetscFreeA(2,1464,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,&(rootNodes),&(leafNodes))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1464,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1465 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1466}
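A hedged usage sketch (hypothetical caller; migSF and dmParallel are assumed names for a migration SF and the redistributed DM): the resulting point SF has one leaf per ghost point, rooted at the owning rank.

    PetscSF           pointSF;
    PetscInt          nroots, nleaves;
    const PetscInt    *local;
    const PetscSFNode *remote;

    ierr = DMPlexCreatePointSF(dmParallel, migSF, PETSC_TRUE, &pointSF);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(pointSF, &nroots, &nleaves, &local, &remote);CHKERRQ(ierr);
    /* local[i] is a ghost point on this rank, owned at remote[i].index on remote[i].rank */
    ierr = PetscSFView(pointSF, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);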
1467
1468/*@C
1469 DMPlexMigrate - Migrates internal DM data over the supplied star forest
1470
1471 Collective on DM and PetscSF
1472
1473 Input Parameters:
1474+ dm - The source DMPlex object
1475- sf - The star forest communication context describing the migration pattern
1476
1477 Output Parameter:
1478. targetDM - The target DMPlex object
1479
1480 Level: intermediate
1481
1482.seealso: DMPlexDistribute(), DMPlexDistributeOverlap()
1483@*/
1484PetscErrorCode DMPlexMigrate(DM dm, PetscSF sf, DM targetDM)
1485{
1486 MPI_Comm comm;
1487 PetscInt dim, cdim, nroots;
1488 PetscSF sfPoint;
1489 ISLocalToGlobalMapping ltogMigration;
1490 ISLocalToGlobalMapping ltogOriginal = NULL((void*)0);
1491 PetscBool flg;
1492 PetscErrorCode ierr;
1493
1494 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1494; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1495 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1495,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1495,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1495,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1495,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1496 ierr = PetscLogEventBegin(DMPLEX_Migrate, dm, 0, 0, 0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_Migrate].active) ? (*PetscLogPLB)((DMPLEX_Migrate),0,
(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1496,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1497 ierr = PetscObjectGetComm((PetscObject) dm, &comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1497,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1498 ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1498,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1499 ierr = DMSetDimension(targetDM, dim);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1499,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1500 ierr = DMGetCoordinateDim(dm, &cdim);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1500,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1501 ierr = DMSetCoordinateDim(targetDM, cdim);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1501,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1502
1503 /* Check for a one-to-all distribution pattern */
1504 ierr = DMGetPointSF(dm, &sfPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1504,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1505 ierr = PetscSFGetGraph(sfPoint, &nroots, NULL((void*)0), NULL((void*)0), NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1505,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1506 if (nroots >= 0) {
1507 IS isOriginal;
1508 PetscInt n, size, nleaves;
1509 PetscInt *numbering_orig, *numbering_new;
1510
1511 /* Get the original point numbering */
1512 ierr = DMPlexCreatePointNumbering(dm, &isOriginal);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1512,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1513 ierr = ISLocalToGlobalMappingCreateIS(isOriginal, &ltogOriginal);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1513,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1514 ierr = ISLocalToGlobalMappingGetSize(ltogOriginal, &size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1514,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1515 ierr = ISLocalToGlobalMappingGetBlockIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1515,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1516 /* Convert to positive global numbers */
1517 for (n=0; n<size; n++) {if (numbering_orig[n] < 0) numbering_orig[n] = -(numbering_orig[n]+1);}
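      /* Editorial note: a DMPlex point numbering marks points this rank does not own by storing
         -(global+1), so the flip above recovers the true global index for every local point. */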
1518 /* Derive the new local-to-global mapping from the old one */
1519 ierr = PetscSFGetGraph(sf, NULL((void*)0), &nleaves, NULL((void*)0), NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1519,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1520 ierr = PetscMalloc1(nleaves, &numbering_new)PetscMallocA(1,PETSC_FALSE,1520,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(nleaves)*sizeof(**(&numbering_new)),(&numbering_new
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1520,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1521 ierr = PetscSFBcastBegin(sf, MPIU_INT((MPI_Datatype)0x4c000405), (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1521,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1522 ierr = PetscSFBcastEnd(sf, MPIU_INT((MPI_Datatype)0x4c000405), (PetscInt *) numbering_orig, numbering_new);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1522,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1523 ierr = ISLocalToGlobalMappingCreate(comm, 1, nleaves, (const PetscInt*) numbering_new, PETSC_OWN_POINTER, &ltogMigration);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1523,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1524 ierr = ISLocalToGlobalMappingRestoreIndices(ltogOriginal, (const PetscInt**)&numbering_orig);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1524,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1525 ierr = ISDestroy(&isOriginal);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1525,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1526 } else {
1527 /* One-to-all distribution pattern: We can derive LToG from SF */
1528 ierr = ISLocalToGlobalMappingCreateSF(sf, 0, &ltogMigration);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1528,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1529 }
1530 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1530,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1531 if (flg) {
1532 ierr = PetscPrintf(comm, "Point renumbering for DM migration:\n");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1532,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1533 ierr = ISLocalToGlobalMappingView(ltogMigration, NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1533,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1534 }
1535 /* Migrate DM data to target DM */
1536 ierr = DMPlexDistributeCones(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1536,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1537 ierr = DMPlexDistributeLabels(dm, sf, targetDM);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1537,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1538 ierr = DMPlexDistributeCoordinates(dm, sf, targetDM);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1538,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1539 ierr = DMPlexDistributeSetupHybrid(dm, sf, ltogMigration, targetDM);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1539,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1540 ierr = DMPlexDistributeSetupTree(dm, sf, ltogOriginal, ltogMigration, targetDM);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1540,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1541 ierr = ISLocalToGlobalMappingDestroy(&ltogOriginal);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1541,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1542 ierr = ISLocalToGlobalMappingDestroy(&ltogMigration);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1542,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1543 ierr = PetscLogEventEnd(DMPLEX_Migrate, dm, 0, 0, 0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_Migrate].active) ? (*PetscLogPLE)((DMPLEX_Migrate),0,
(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1543,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1544 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1545}
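A minimal caller sketch (hypothetical, assuming a migration SF migSF): the target DM must already exist, and DMPlexMigrate then fills it with the cones, labels, coordinates, and tree data moved over the SF.

    DM targetDM;

    ierr = DMPlexCreate(PetscObjectComm((PetscObject) dm), &targetDM);CHKERRQ(ierr);
    ierr = DMPlexMigrate(dm, migSF, targetDM);CHKERRQ(ierr);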
1546
1547PETSC_INTERNextern __attribute__((visibility ("hidden"))) PetscErrorCode DMPlexPartitionLabelClosure_Private(DM,DMLabel,PetscInt,PetscInt,const PetscInt[],IS*);
1548
1549/*@C
1550 DMPlexDistribute - Distributes the mesh and any associated sections.
1551
1552 Collective on DM
1553
1554 Input Parameters:
1555+ dm - The original DMPlex object
1556- overlap - The overlap of partitions, 0 is the default
1557
1558 Output Parameters:
1559+ sf - The PetscSF used for point distribution, or NULL if not needed
1560- dmParallel - The distributed DMPlex object
1561
1562 Note: If the mesh was not distributed, the output dmParallel will be NULL.
1563
1564 The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1565 representation on the mesh.
1566
1567 Level: intermediate
1568
1569.keywords: mesh, elements
1570.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMSetAdjacency()
1571@*/
1572PetscErrorCode DMPlexDistribute(DM dm, PetscInt overlap, PetscSF *sf, DM *dmParallel)
1573{
1574 MPI_Comm comm;
1575 PetscPartitioner partitioner;
1576 IS cellPart;
1577 PetscSection cellPartSection;
1578 DM dmCoord;
1579 DMLabel lblPartition, lblMigration;
1580 PetscSF sfMigration, sfStratified, sfPoint;
1581 PetscBool flg, balance;
1582 PetscMPIInt rank, size;
1583 PetscErrorCode ierr;
1584
1585 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1585; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1586 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1586,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1586,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1586,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1586,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1587 PetscValidLogicalCollectiveInt(dm, overlap, 2)do { PetscErrorCode _7_ierr; PetscInt b1[2],b2[2]; b1[0] = -overlap
; b1[1] = overlap; _7_ierr = (PetscAllreduceBarrierCheck(PetscObjectComm
((PetscObject)dm),2,1587,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)dm))),0) || MPI_Allreduce((b1),(b2),(2),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000001)),(PetscObjectComm((PetscObject
)dm)))));do {if (__builtin_expect(!!(_7_ierr),0)) return PetscError
(((MPI_Comm)0x44000001),1587,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (-b2[0] != b2
[1]) return PetscError(PetscObjectComm((PetscObject)dm),1587,
__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Int value must be same on all processes, argument # %d"
,2); } while (0)
;
1588 if (sf) PetscValidPointer(sf,3)do { if (!sf) return PetscError(((MPI_Comm)0x44000001),1588,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(sf,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),1588,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",3);
} while (0)
;
1
Assuming 'sf' is null
2
Taking false branch
1589 PetscValidPointer(dmParallel,4)do { if (!dmParallel) return PetscError(((MPI_Comm)0x44000001
),1589,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",4); if
(!PetscCheckPointer(dmParallel,PETSC_CHAR)) return PetscError
(((MPI_Comm)0x44000001),1589,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",4);
} while (0)
;
1590
1591 if (sf) *sf = NULL((void*)0);
3
Taking false branch
1592 *dmParallel = NULL((void*)0);
1593 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1593,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1594 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1594,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1595 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1595,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1596 if (size == 1) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4
Assuming 'size' is not equal to 1
5
Taking false branch
1597
1598 ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_Distribute].active) ? (*PetscLogPLB)((DMPLEX_Distribute
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1598,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1599 /* Create cell partition */
1600 ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_Partition].active) ? (*PetscLogPLB)((DMPLEX_Partition
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1600,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1601 ierr = PetscSectionCreate(comm, &cellPartSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1601,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1602 ierr = DMPlexGetPartitioner(dm, &partitioner);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1602,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1603 ierr = PetscPartitionerPartition(partitioner, dm, cellPartSection, &cellPart);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1603,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1604 {
1605 /* Convert partition to DMLabel */
1606 IS is;
1607 PetscHSetI ht;
1608 PetscInt pStart, pEnd, proc, npoints, poff = 0, nranks, *iranks;
1609 const PetscInt *points;
1610
1611 ierr = DMLabelCreate(PETSC_COMM_SELF((MPI_Comm)0x44000001), "Point Partition", &lblPartition);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1611,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1612 /* Preallocate strata */
1613 ierr = PetscHSetICreate(&ht);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1613,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
6
Calling 'PetscHSetICreate'
9
Returning from 'PetscHSetICreate'
1614 ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1614,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1615 for (proc = pStart; proc < pEnd; proc++) {
10
Loop condition is true. Entering loop body
13
Loop condition is true. Entering loop body
16
Loop condition is true. Entering loop body
19
Loop condition is true. Entering loop body
1616 ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1616,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1617 if (npoints) {ierr = PetscHSetIAdd(ht, proc);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1617,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
11
Assuming 'npoints' is 0
12
Taking false branch
14
Assuming 'npoints' is 0
15
Taking false branch
17
Assuming 'npoints' is 0
18
Taking false branch
20
Assuming 'npoints' is not equal to 0
21
Taking true branch
22
Calling 'PetscHSetIAdd'
1618 }
1619 ierr = PetscHSetIGetSize(ht, &nranks);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1619,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1620 ierr = PetscMalloc1(nranks, &iranks)PetscMallocA(1,PETSC_FALSE,1620,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(nranks)*sizeof(**(&iranks)),(&iranks))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1620,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1621 ierr = PetscHSetIGetElems(ht, &poff, iranks);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1621,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1622 ierr = PetscHSetIDestroy(&ht);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1622,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1623 ierr = DMLabelAddStrata(lblPartition, nranks, iranks);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1623,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1624 ierr = PetscFree(iranks)((*PetscTrFree)((void*)(iranks),1624,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
) || ((iranks) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1624,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1625 /* Inline DMPlexPartitionLabelClosure() */
1626 ierr = ISGetIndices(cellPart, &points);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1626,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1627 ierr = PetscSectionGetChart(cellPartSection, &pStart, &pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1627,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1628 for (proc = pStart; proc < pEnd; proc++) {
1629 ierr = PetscSectionGetDof(cellPartSection, proc, &npoints);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1629,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1630 if (!npoints) continue;
1631 ierr = PetscSectionGetOffset(cellPartSection, proc, &poff);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1631,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1632 ierr = DMPlexPartitionLabelClosure_Private(dm, lblPartition, proc, npoints, points+poff, &is);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1632,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1633 ierr = DMLabelSetStratumIS(lblPartition, proc, is);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1633,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1634 ierr = ISDestroy(&is);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1634,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1635 }
1636 ierr = ISRestoreIndices(cellPart, &points);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1636,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1637 }
1638 ierr = DMLabelCreate(PETSC_COMM_SELF((MPI_Comm)0x44000001), "Point migration", &lblMigration);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1638,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1639 ierr = DMPlexPartitionLabelInvert(dm, lblPartition, NULL((void*)0), lblMigration);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1639,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1640 ierr = DMPlexPartitionLabelCreateSF(dm, lblMigration, &sfMigration);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1640,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1641 /* Stratify the SF in case we are migrating an already parallel plex */
1642 ierr = DMPlexStratifyMigrationSF(dm, sfMigration, &sfStratified);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1642,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1643 ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1643,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1644 sfMigration = sfStratified;
1645 ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_Partition].active) ? (*PetscLogPLE)((DMPLEX_Partition
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1645,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1646 ierr = PetscOptionsHasName(((PetscObject) dm)->options,((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1646,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1647 if (flg) {
1648 ierr = DMLabelView(lblPartition, PETSC_VIEWER_STDOUT_WORLDPETSC_VIEWER_STDOUT_(PETSC_COMM_WORLD));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1648,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1649 ierr = PetscSFView(sfMigration, PETSC_VIEWER_STDOUT_WORLDPETSC_VIEWER_STDOUT_(PETSC_COMM_WORLD));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1649,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1650 }
1651
1652 /* Create non-overlapping parallel DM and migrate internal data */
1653 ierr = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1653,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1654 ierr = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1654,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1655 ierr = DMPlexMigrate(dm, sfMigration, *dmParallel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1655,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1656
1657 /* Build the point SF without overlap */
1658 ierr = DMPlexGetPartitionBalance(dm, &balance);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1658,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1659 ierr = DMPlexSetPartitionBalance(*dmParallel, balance);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1659,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1660 ierr = DMPlexCreatePointSF(*dmParallel, sfMigration, PETSC_TRUE, &sfPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1660,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1661 ierr = DMSetPointSF(*dmParallel, sfPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1661,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1662 ierr = DMGetCoordinateDM(*dmParallel, &dmCoord);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1662,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1663 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1663,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1664 if (flg) {ierr = PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLDPETSC_VIEWER_STDOUT_(PETSC_COMM_WORLD));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1664,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1665
1666 if (overlap > 0) {
1667 DM dmOverlap;
1668 PetscInt nroots, nleaves;
1669 PetscSFNode *newRemote;
1670 const PetscSFNode *oldRemote;
1671 PetscSF sfOverlap, sfOverlapPoint;
1672 /* Add the partition overlap to the distributed DM */
1673 ierr = DMPlexDistributeOverlap(*dmParallel, overlap, &sfOverlap, &dmOverlap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1673,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1674 ierr = DMDestroy(dmParallel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1674,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1675 *dmParallel = dmOverlap;
1676 if (flg) {
1677 ierr = PetscPrintf(comm, "Overlap Migration SF:\n");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1677,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1678 ierr = PetscSFView(sfOverlap, NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1678,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1679 }
1680
1681 /* Re-map the migration SF to establish the full migration pattern */
1682 ierr = PetscSFGetGraph(sfMigration, &nroots, NULL((void*)0), NULL((void*)0), &oldRemote);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1682,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1683 ierr = PetscSFGetGraph(sfOverlap, NULL((void*)0), &nleaves, NULL((void*)0), NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1683,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1684 ierr = PetscMalloc1(nleaves, &newRemote)PetscMallocA(1,PETSC_FALSE,1684,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(nleaves)*sizeof(**(&newRemote)),(&newRemote
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1684,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1685 ierr = PetscSFBcastBegin(sfOverlap, MPIU_2INT((MPI_Datatype)0x4c000816), oldRemote, newRemote);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1685,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1686 ierr = PetscSFBcastEnd(sfOverlap, MPIU_2INT((MPI_Datatype)0x4c000816), oldRemote, newRemote);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1686,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1687 ierr = PetscSFCreate(comm, &sfOverlapPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1687,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1688 ierr = PetscSFSetGraph(sfOverlapPoint, nroots, nleaves, NULL((void*)0), PETSC_OWN_POINTER, newRemote, PETSC_OWN_POINTER);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1688,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1689 ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1689,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1690 ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1690,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1691 sfMigration = sfOverlapPoint;
1692 }
1693 /* Cleanup Partition */
1694 ierr = DMLabelDestroy(&lblPartition);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1694,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1695 ierr = DMLabelDestroy(&lblMigration);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1695,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1696 ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1696,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1697 ierr = ISDestroy(&cellPart);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1697,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1698 /* Copy BC */
1699 ierr = DMCopyBoundary(dm, *dmParallel);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1699,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1700 /* Create sfNatural */
1701 if (dm->useNatural) {
1702 PetscSection section;
1703
1704 ierr = DMGetSection(dm, &section);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1704,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1705 ierr = DMPlexCreateGlobalToNaturalSF(*dmParallel, section, sfMigration, &(*dmParallel)->sfNatural);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1705,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1706 ierr = DMSetUseNatural(*dmParallel, PETSC_TRUE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1706,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1707 }
1708 /* Cleanup */
1709 if (sf) {*sf = sfMigration;}
1710 else {ierr = PetscSFDestroy(&sfMigration);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1710,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1711 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1711,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1712 ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_Distribute].active) ? (*PetscLogPLE)((DMPLEX_Distribute
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1712,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1713 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1714}
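
For orientation, a minimal usage sketch of DMPlexDistribute (not part of the analyzed source); it assumes dm is an existing serial DMPlex and uses the same CHKERRQ error-handling pattern whose expansion appears throughout this report:

  /* Sketch only: redistribute dm across its communicator with one layer of overlap. */
  DM             dmDist = NULL;
  PetscSF        sfDist = NULL;
  PetscErrorCode ierr;

  ierr = DMPlexDistribute(dm, 1, &sfDist, &dmDist);CHKERRQ(ierr);
  if (dmDist) {                      /* NULL when nothing was distributed (single process) */
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = dmDist;
  }
  ierr = PetscSFDestroy(&sfDist);CHKERRQ(ierr);   /* safe even if sfDist is NULL */
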
1715
1716/*@C
1717 DMPlexDistributeOverlap - Add partition overlap to a distributed non-overlapping DM.
1718
1719 Collective on DM
1720
1721 Input Parameters:
1722+ dm - The non-overlapping distributed DMPlex object
1723- overlap - The overlap of partitions
1724
1725 Output Parameters:
1726+ sf - The PetscSF used for point distribution (optional)
1727- dmOverlap - The overlapping distributed DMPlex object, or NULL
1728
1729 Note: If the mesh was not distributed (i.e. it lives on a single process), dmOverlap is returned as NULL.
1730
1731 The user can control the definition of adjacency for the mesh using DMSetAdjacency(). They should choose the combination appropriate for the function
1732 representation on the mesh.
1733
1734 Level: intermediate
1735
1736.keywords: mesh, elements
1737.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMSetAdjacency()
1738@*/
1739PetscErrorCode DMPlexDistributeOverlap(DM dm, PetscInt overlap, PetscSF *sf, DM *dmOverlap)
1740{
1741 MPI_Comm comm;
1742 PetscMPIInt size, rank;
1743 PetscSection rootSection, leafSection;
1744 IS rootrank, leafrank;
1745 DM dmCoord;
1746 DMLabel lblOverlap;
1747 PetscSF sfOverlap, sfStratified, sfPoint;
1748 PetscErrorCode ierr;
1749
1750 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1750; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1751 PetscValidHeaderSpecific(dm, DM_CLASSID, 1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1751,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1751,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1751,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1751,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1752 if (sf) PetscValidPointer(sf, 3)do { if (!sf) return PetscError(((MPI_Comm)0x44000001),1752,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(sf,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),1752,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",3);
} while (0)
;
1753 PetscValidPointer(dmOverlap, 4)do { if (!dmOverlap) return PetscError(((MPI_Comm)0x44000001)
,1753,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",4); if
(!PetscCheckPointer(dmOverlap,PETSC_CHAR)) return PetscError
(((MPI_Comm)0x44000001),1753,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",4);
} while (0)
;
1754
1755 if (sf) *sf = NULL((void*)0);
1756 *dmOverlap = NULL((void*)0);
1757 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1757,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1758 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1758,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1759 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1759,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1760 if (size == 1) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1761
1762 ierr = PetscLogEventBegin(DMPLEX_DistributeOverlap, dm, 0, 0, 0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeOverlap].active) ? (*PetscLogPLB)((DMPLEX_DistributeOverlap
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1762,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1763 /* Compute point overlap with neighbouring processes on the distributed DM */
1764 ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_Partition].active) ? (*PetscLogPLB)((DMPLEX_Partition
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1764,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1765 ierr = PetscSectionCreate(comm, &rootSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1765,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1766 ierr = PetscSectionCreate(comm, &leafSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1766,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1767 ierr = DMPlexDistributeOwnership(dm, rootSection, &rootrank, leafSection, &leafrank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1767,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1768 ierr = DMPlexCreateOverlap(dm, overlap, rootSection, rootrank, leafSection, leafrank, &lblOverlap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1768,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1769 /* Convert overlap label to stratified migration SF */
1770 ierr = DMPlexPartitionLabelCreateSF(dm, lblOverlap, &sfOverlap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1770,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1771 ierr = DMPlexStratifyMigrationSF(dm, sfOverlap, &sfStratified);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1771,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1772 ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1772,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1773 sfOverlap = sfStratified;
1774 ierr = PetscObjectSetName((PetscObject) sfOverlap, "Overlap SF");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1774,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1775 ierr = PetscSFSetFromOptions(sfOverlap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1775,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1776
1777 ierr = PetscSectionDestroy(&rootSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1777,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1778 ierr = PetscSectionDestroy(&leafSection);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1778,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1779 ierr = ISDestroy(&rootrank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1779,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1780 ierr = ISDestroy(&leafrank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1780,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1781 ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_Partition].active) ? (*PetscLogPLE)((DMPLEX_Partition
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1781,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1782
1783 /* Build the overlapping DM */
1784 ierr = DMPlexCreate(comm, dmOverlap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1784,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1785 ierr = PetscObjectSetName((PetscObject) *dmOverlap, "Parallel Mesh");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1785,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1786 ierr = DMPlexMigrate(dm, sfOverlap, *dmOverlap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1786,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1787 /* Build the new point SF */
1788 ierr = DMPlexCreatePointSF(*dmOverlap, sfOverlap, PETSC_FALSE, &sfPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1788,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1789 ierr = DMSetPointSF(*dmOverlap, sfPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1789,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1790 ierr = DMGetCoordinateDM(*dmOverlap, &dmCoord);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1790,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1791 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1791,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1792 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1792,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1793 /* Cleanup overlap partition */
1794 ierr = DMLabelDestroy(&lblOverlap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1794,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1795 if (sf) *sf = sfOverlap;
1796 else {ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1796,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1797 ierr = PetscLogEventEnd(DMPLEX_DistributeOverlap, dm, 0, 0, 0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[DMPLEX_DistributeOverlap].active) ? (*PetscLogPLE)((DMPLEX_DistributeOverlap
),0,(PetscObject)(dm),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1797,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1798 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1799}
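
A minimal usage sketch of DMPlexDistributeOverlap (not part of the analyzed source), assuming dmParallel is a non-overlapping distributed DMPlex obtained from DMPlexDistribute():

  /* Sketch only: add one level of partition overlap to an already-distributed mesh. */
  DM             dmOverlap = NULL;
  PetscSF        sfOverlap = NULL;
  PetscErrorCode ierr;

  ierr = DMPlexDistributeOverlap(dmParallel, 1, &sfOverlap, &dmOverlap);CHKERRQ(ierr);
  if (dmOverlap) {                   /* NULL when running on a single process */
    ierr = DMDestroy(&dmParallel);CHKERRQ(ierr);
    dmParallel = dmOverlap;
  }
  ierr = PetscSFDestroy(&sfOverlap);CHKERRQ(ierr);
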
1800
1801/*@C
1802 DMPlexGetGatherDM - Get a copy of the DMPlex that gathers all points on the
1803 root process of the original's communicator.
1804
1805 Collective on DM
1806
1807 Input Parameter:
1808. dm - the original DMPlex object
1809
1810 Output Parameters:
1811+ sf - the PetscSF used for point distribution (optional)
1812- gatherMesh - the gathered DM object, or NULL
1813
1814 Level: intermediate
1815
1816.keywords: mesh
1817.seealso: DMPlexDistribute(), DMPlexGetRedundantDM()
1818@*/
1819PetscErrorCode DMPlexGetGatherDM(DM dm, PetscSF *sf, DM *gatherMesh)
1820{
1821 MPI_Comm comm;
1822 PetscMPIInt size;
1823 PetscPartitioner oldPart, gatherPart;
1824 PetscErrorCode ierr;
1825
1826 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1826; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1827 PetscValidHeaderSpecific(dm,DM_CLASSID,1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1827,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1827,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1827,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1827,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1828 PetscValidPointer(gatherMesh,2)do { if (!gatherMesh) return PetscError(((MPI_Comm)0x44000001
),1828,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",2); if
(!PetscCheckPointer(gatherMesh,PETSC_CHAR)) return PetscError
(((MPI_Comm)0x44000001),1828,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",2);
} while (0)
;
1829 *gatherMesh = NULL((void*)0);
1830 if (sf) *sf = NULL((void*)0);
1831 comm = PetscObjectComm((PetscObject)dm);
1832 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1832,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1833 if (size == 1) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1834 ierr = DMPlexGetPartitioner(dm,&oldPart);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1834,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1835 ierr = PetscObjectReference((PetscObject)oldPart);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1835,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1836 ierr = PetscPartitionerCreate(comm,&gatherPart);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1836,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1837 ierr = PetscPartitionerSetType(gatherPart,PETSCPARTITIONERGATHER"gather");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1837,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1838 ierr = DMPlexSetPartitioner(dm,gatherPart);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1838,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1839 ierr = DMPlexDistribute(dm,0,sf,gatherMesh);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1839,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1840
1841 ierr = DMPlexSetPartitioner(dm,oldPart);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1841,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1842 ierr = PetscPartitionerDestroy(&gatherPart);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1842,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1843 ierr = PetscPartitionerDestroy(&oldPart);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1843,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1844 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1845}
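
A minimal usage sketch of DMPlexGetGatherDM (not part of the analyzed source), assuming dm is a distributed DMPlex; a gathered copy is useful, for example, for serial viewing or post-processing on the root process:

  /* Sketch only: gather the whole mesh onto the root process. */
  DM             gatherMesh = NULL;
  PetscSF        gatherSF   = NULL;
  PetscErrorCode ierr;

  ierr = DMPlexGetGatherDM(dm, &gatherSF, &gatherMesh);CHKERRQ(ierr);
  if (gatherMesh) {                  /* NULL on a single process */
    /* ... inspect or view the gathered mesh on rank 0 ... */
    ierr = DMDestroy(&gatherMesh);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr);
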
1846
1847/*@C
1848 DMPlexGetRedundantDM - Get a copy of the DMPlex that is completely copied on each process.
1849
1850 Collective on DM
1851
1852 Input Parameter:
1853. dm - the original DMPlex object
1854
1855 Output Parameters:
1856+ sf - the PetscSF used for point distribution (optional)
1857- redundantMesh - the redundant DM object, or NULL
1858
1859 Level: intermediate
1860
1861.keywords: mesh
1862.seealso: DMPlexDistribute(), DMPlexGetGatherDM()
1863@*/
1864PetscErrorCode DMPlexGetRedundantDM(DM dm, PetscSF *sf, DM *redundantMesh)
1865{
1866 MPI_Comm comm;
1867 PetscMPIInt size, rank;
1868 PetscInt pStart, pEnd, p;
1869 PetscInt numPoints = -1;
1870 PetscSF migrationSF, sfPoint, gatherSF;
1871 DM gatherDM, dmCoord;
1872 PetscSFNode *points;
1873 PetscErrorCode ierr;
1874
1875 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
; petscstack->line[petscstack->currentsize] = 1875; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1876 PetscValidHeaderSpecific(dm,DM_CLASSID,1)do { if (!dm) return PetscError(((MPI_Comm)0x44000001),1876,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(dm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1876,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(dm))->classid != DM_CLASSID) { if (
((PetscObject)(dm))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),1876,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1876,__func__
,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,62,PETSC_ERROR_INITIAL,"Wrong type of object: Parameter # %d"
,1); } } while (0)
;
1877 PetscValidPointer(redundantMesh,2)do { if (!redundantMesh) return PetscError(((MPI_Comm)0x44000001
),1877,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",2); if
(!PetscCheckPointer(redundantMesh,PETSC_CHAR)) return PetscError
(((MPI_Comm)0x44000001),1877,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",2);
} while (0)
;
1878 *redundantMesh = NULL((void*)0);
1879 comm = PetscObjectComm((PetscObject)dm);
1880 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1880,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1881 if (size == 1) {
1882 ierr = PetscObjectReference((PetscObject) dm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1882,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1883 *redundantMesh = dm;
1884 if (sf) *sf = NULL((void*)0);
1885 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1886 }
1887 ierr = DMPlexGetGatherDM(dm,&gatherSF,&gatherDM);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1887,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1888 if (!gatherDM) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1889 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1889,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1890 ierr = DMPlexGetChart(gatherDM,&pStart,&pEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1890,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1891 numPoints = pEnd - pStart;
1892 ierr = MPI_Bcast(&numPoints,1,MPIU_INT,0,comm)((petsc_allreduce_ct += PetscMPIParallelComm((comm)),0) || MPI_Bcast
((&numPoints),(1),(((MPI_Datatype)0x4c000405)),(0),(comm)
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1892,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1893 ierr = PetscMalloc1(numPoints,&points)PetscMallocA(1,PETSC_FALSE,1893,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,(size_t)(numPoints)*sizeof(**(&points)),(&points))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1893,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1894 ierr = PetscSFCreate(comm,&migrationSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1894,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1895 for (p = 0; p < numPoints; p++) {
1896 points[p].index = p;
1897 points[p].rank = 0;
1898 }
1899 ierr = PetscSFSetGraph(migrationSF,pEnd-pStart,numPoints,NULL((void*)0),PETSC_OWN_POINTER,points,PETSC_OWN_POINTER);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1899,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1900 ierr = DMPlexCreate(comm, redundantMesh);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1900,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1901 ierr = PetscObjectSetName((PetscObject) *redundantMesh, "Redundant Mesh");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1901,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1902 ierr = DMPlexMigrate(gatherDM, migrationSF, *redundantMesh);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1902,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1903 ierr = DMPlexCreatePointSF(*redundantMesh, migrationSF, PETSC_FALSE, &sfPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1903,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1904 ierr = DMSetPointSF(*redundantMesh, sfPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1904,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1905 ierr = DMGetCoordinateDM(*redundantMesh, &dmCoord);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1905,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1906 if (dmCoord) {ierr = DMSetPointSF(dmCoord, sfPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1906,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1907 ierr = PetscSFDestroy(&sfPoint);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1907,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1908 if (sf) {
1909 PetscSF tsf;
1910
1911 ierr = PetscSFCompose(gatherSF,migrationSF,&tsf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1911,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1912 ierr = DMPlexStratifyMigrationSF(dm, tsf, sf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1912,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1913 ierr = PetscSFDestroy(&tsf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1913,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1914 }
1915 ierr = PetscSFDestroy(&migrationSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1915,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1916 ierr = PetscSFDestroy(&gatherSF);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1916,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1917 ierr = DMDestroy(&gatherDM);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1917,__func__,"/sandbox/petsc/petsc.next-tmp/src/dm/impls/plex/plexdistribute.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1918 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1919}
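
A minimal usage sketch of DMPlexGetRedundantDM (not part of the analyzed source), assuming dm is a distributed DMPlex:

  /* Sketch only: give every process a complete copy of the mesh. */
  DM             redundantMesh = NULL;
  PetscSF        redundantSF   = NULL;
  PetscErrorCode ierr;

  ierr = DMPlexGetRedundantDM(dm, &redundantSF, &redundantMesh);CHKERRQ(ierr);
  if (redundantMesh) {               /* on a single process a reference to dm itself is returned */
    /* ... every rank now holds the full mesh ... */
    ierr = DMDestroy(&redundantMesh);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&redundantSF);CHKERRQ(ierr);
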

/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h

1#if !defined(PETSC_HASHSETI_H)
2#define PETSC_HASHSETI_H
3
4#include <petsc/private/hashset.h>
5
6PETSC_HASH_SET(HSetI, PetscInt, PetscHashInt, PetscHashEqual)typedef struct kh_HSetI_s { khint_t n_buckets, size, n_occupied
, upper_bound; khint32_t *flags; PetscInt *keys; char *vals; }
kh_HSetI_t; static inline __attribute((unused)) kh_HSetI_t *
kh_init_HSetI(void) { return (kh_HSetI_t*)calloc(1,sizeof(kh_HSetI_t
)); } static inline __attribute((unused)) void kh_destroy_HSetI
(kh_HSetI_t *h) { if (h) { free((void *)h->keys); free(h->
flags); free((void *)h->vals); free(h); } } static inline __attribute
((unused)) void kh_clear_HSetI(kh_HSetI_t *h) { if (h &&
h->flags) { memset(h->flags, 0xaa, ((h->n_buckets) <
16? 1 : (h->n_buckets)>>4) * sizeof(khint32_t)); h->
size = h->n_occupied = 0; } } static inline __attribute((unused
)) khint_t kh_get_HSetI(const kh_HSetI_t *h, PetscInt key) { if
(h->n_buckets) { khint_t k, i, last, mask, step = 0; mask
= h->n_buckets - 1; k = PetscHashInt(key); i = k & mask
; last = i; while (!((h->flags[i>>4]>>((i&
0xfU)<<1))&2) && (((h->flags[i>>4]
>>((i&0xfU)<<1))&1) || !((h->keys[i]) ==
(key)))) { i = (i + (++step)) & mask; if (i == last) return
h->n_buckets; } return ((h->flags[i>>4]>>(
(i&0xfU)<<1))&3)? h->n_buckets : i; } else return
0; } static inline __attribute((unused)) int kh_resize_HSetI
(kh_HSetI_t *h, khint_t new_n_buckets) { khint32_t *new_flags
= 0; khint_t j = 1; { (--(new_n_buckets), (new_n_buckets)|=(
new_n_buckets)>>1, (new_n_buckets)|=(new_n_buckets)>>
2, (new_n_buckets)|=(new_n_buckets)>>4, (new_n_buckets)
|=(new_n_buckets)>>8, (new_n_buckets)|=(new_n_buckets)>>
16, ++(new_n_buckets)); if (new_n_buckets < 4) new_n_buckets
= 4; if (h->size >= (khint_t)(new_n_buckets * __ac_HASH_UPPER
+ 0.5)) j = 0; else { new_flags = (khint32_t*)malloc(((new_n_buckets
) < 16? 1 : (new_n_buckets)>>4) * sizeof(khint32_t))
; if (!new_flags) return -1; memset(new_flags, 0xaa, ((new_n_buckets
) < 16? 1 : (new_n_buckets)>>4) * sizeof(khint32_t))
; if (h->n_buckets < new_n_buckets) { PetscInt *new_keys
= (PetscInt*)realloc((void *)h->keys,new_n_buckets * sizeof
(PetscInt)); if (!new_keys) { free(new_flags); return -1; } h
->keys = new_keys; if (0) { char *new_vals = (char*)realloc
((void *)h->vals,new_n_buckets * sizeof(char)); if (!new_vals
) { free(new_flags); return -1; } h->vals = new_vals; } } }
} if (j) { for (j = 0; j != h->n_buckets; ++j) { if (((h->
flags[j>>4]>>((j&0xfU)<<1))&3) == 0
) { PetscInt key = h->keys[j]; char val; khint_t new_mask;
new_mask = new_n_buckets - 1; if (0) val = h->vals[j]; (h
->flags[j>>4]|=1ul<<((j&0xfU)<<1)); while
(1) { khint_t k, i, step = 0; k = PetscHashInt(key); i = k &
new_mask; while (!((new_flags[i>>4]>>((i&0xfU
)<<1))&2)) i = (i + (++step)) & new_mask; (new_flags
[i>>4]&=~(2ul<<((i&0xfU)<<1))); if (
i < h->n_buckets && ((h->flags[i>>4]>>
((i&0xfU)<<1))&3) == 0) { { PetscInt tmp = h->
keys[i]; h->keys[i] = key; key = tmp; } if (0) { char tmp =
h->vals[i]; h->vals[i] = val; val = tmp; } (h->flags
[i>>4]|=1ul<<((i&0xfU)<<1)); } else { h
->keys[i] = key; if (0) h->vals[i] = val; break; } } } }
if (h->n_buckets > new_n_buckets) { h->keys = (PetscInt
*)realloc((void *)h->keys,new_n_buckets * sizeof(PetscInt)
); if (0) h->vals = (char*)realloc((void *)h->vals,new_n_buckets
* sizeof(char)); } free(h->flags); h->flags = new_flags
; h->n_buckets = new_n_buckets; h->n_occupied = h->size
; h->upper_bound = (khint_t)(h->n_buckets * __ac_HASH_UPPER
+ 0.5); } return 0; } static inline __attribute((unused)) khint_t
kh_put_HSetI(kh_HSetI_t *h, PetscInt key, int *ret) { khint_t
x; if (h->n_occupied >= h->upper_bound) { if (h->
n_buckets > (h->size<<1)) { if (kh_resize_HSetI(h
, h->n_buckets - 1) < 0) { *ret = -1; return h->n_buckets
; } } else if (kh_resize_HSetI(h, h->n_buckets + 1) < 0
) { *ret = -1; return h->n_buckets; } } { khint_t k, i, site
, last, mask = h->n_buckets - 1, step = 0; x = site = h->
n_buckets; k = PetscHashInt(key); i = k & mask; if (((h->
flags[i>>4]>>((i&0xfU)<<1))&2)) x =
i; else { last = i; while (!((h->flags[i>>4]>>
((i&0xfU)<<1))&2) && (((h->flags[i>>
4]>>((i&0xfU)<<1))&1) || !((h->keys[i]
) == (key)))) { if (((h->flags[i>>4]>>((i&
0xfU)<<1))&1)) site = i; i = (i + (++step)) & mask
; if (i == last) { x = site; break; } } if (x == h->n_buckets
) { if (((h->flags[i>>4]>>((i&0xfU)<<
1))&2) && site != h->n_buckets) x = site; else
x = i; } } } if (((h->flags[x>>4]>>((x&0xfU
)<<1))&2)) { h->keys[x] = key; (h->flags[x>>
4]&=~(3ul<<((x&0xfU)<<1))); ++h->size;
++h->n_occupied; *ret = 1; } else if (((h->flags[x>>
4]>>((x&0xfU)<<1))&1)) { h->keys[x] = key
; (h->flags[x>>4]&=~(3ul<<((x&0xfU)<<
1))); ++h->size; *ret = 2; } else *ret = 0; return x; } static
inline __attribute((unused)) void kh_del_HSetI(kh_HSetI_t *h
, khint_t x) { if (x != h->n_buckets && !((h->flags
[x>>4]>>((x&0xfU)<<1))&3)) { (h->
flags[x>>4]|=1ul<<((x&0xfU)<<1)); --h->
size; } } typedef kh_HSetI_t *PetscHSetI; static inline __attribute
((unused)) PetscErrorCode PetscHSetICreate(PetscHSetI *ht) { do
{ do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
; petscstack->line[petscstack->currentsize] = 6; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0); do { if (!ht) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",1); if
(!PetscCheckPointer(ht,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",1);
} while (0); *ht = kh_init_HSetI(); do { do { ; if (petscstack
&& petscstack->currentsize > 0) { petscstack->
currentsize--; petscstack->function[petscstack->currentsize
] = 0; petscstack->file[petscstack->currentsize] = 0; petscstack
->line[petscstack->currentsize] = 0; petscstack->petscroutine
[petscstack->currentsize] = PETSC_FALSE; } if (petscstack)
{ petscstack->hotdepth = (((petscstack->hotdepth-1)<
(0)) ? (0) : (petscstack->hotdepth-1)); } ; } while (0); return
(0);} while (0); } static inline __attribute((unused)) PetscErrorCode
PetscHSetIDestroy(PetscHSetI *ht) { do { do { ; if (petscstack
&& (petscstack->currentsize < 64)) { petscstack
->function[petscstack->currentsize] = __func__; petscstack
->file[petscstack->currentsize] = "/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
; petscstack->line[petscstack->currentsize] = 6; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0); do { if (!ht) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",1); if
(!PetscCheckPointer(ht,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",1);
} while (0); if (!*ht) do { do { ; if (petscstack &&
petscstack->currentsize > 0) { petscstack->currentsize
--; petscstack->function[petscstack->currentsize] = 0; petscstack
->file[petscstack->currentsize] = 0; petscstack->line
[petscstack->currentsize] = 0; petscstack->petscroutine
[petscstack->currentsize] = PETSC_FALSE; } if (petscstack)
{ petscstack->hotdepth = (((petscstack->hotdepth-1)<
(0)) ? (0) : (petscstack->hotdepth-1)); } ; } while (0); return
(0);} while (0); kh_destroy_HSetI(*ht); *ht = ((void*)0); do {
do { ; if (petscstack && petscstack->currentsize >
0) { petscstack->currentsize--; petscstack->function[petscstack
->currentsize] = 0; petscstack->file[petscstack->currentsize
] = 0; petscstack->line[petscstack->currentsize] = 0; petscstack
->petscroutine[petscstack->currentsize] = PETSC_FALSE; }
if (petscstack) { petscstack->hotdepth = (((petscstack->
hotdepth-1)<(0)) ? (0) : (petscstack->hotdepth-1)); } ;
} while (0); return(0);} while (0); } static inline __attribute
((unused)) PetscErrorCode PetscHSetIReset(PetscHSetI ht) { do
{ do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
; petscstack->line[petscstack->currentsize] = 6; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0); do { if (!ht) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",1); if
(!PetscCheckPointer(ht,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",1);
} while (0); { if (ht) { free((ht)->keys); free((ht)->
flags); free((ht)->vals); memset((ht), 0x00, sizeof(*(ht))
); } }; do { do { ; if (petscstack && petscstack->
currentsize > 0) { petscstack->currentsize--; petscstack
->function[petscstack->currentsize] = 0; petscstack->
file[petscstack->currentsize] = 0; petscstack->line[petscstack
->currentsize] = 0; petscstack->petscroutine[petscstack
->currentsize] = PETSC_FALSE; } if (petscstack) { petscstack
->hotdepth = (((petscstack->hotdepth-1)<(0)) ? (0) :
(petscstack->hotdepth-1)); } ; } while (0); return(0);} while
(0); } static inline __attribute((unused)) PetscErrorCode PetscHSetIDuplicate
(PetscHSetI ht,PetscHSetI *hd) { int ret; PetscInt key; do { do
{ ; if (petscstack && (petscstack->currentsize <
64)) { petscstack->function[petscstack->currentsize] =
__func__; petscstack->file[petscstack->currentsize] = "/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
; petscstack->line[petscstack->currentsize] = 6; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0); do { if (!ht) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",1); if
(!PetscCheckPointer(ht,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",1);
} while (0); do { if (!hd) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",2); if
(!PetscCheckPointer(hd,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",2);
} while (0); *hd = kh_init_HSetI(); ret = kh_resize_HSetI(*hd
, ((ht)->size)); do { if (__builtin_expect(!!(!(ret==0)),0
)) return PetscError(((MPI_Comm)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,76,PETSC_ERROR_INITIAL,"[khash] Assertion: `%s' failed.","ret==0"
); } while(0); { khint_t __i; for (__i = (khint_t)(0); __i !=
((ht)->n_buckets); ++__i) { if (!(!(((ht)->flags[(__i)
>>4]>>(((__i)&0xfU)<<1))&3))) continue
; (key) = ((ht)->keys[__i]); { kh_put_HSetI(*hd, key, &
ret); do { if (__builtin_expect(!!(!(ret>=0)),0)) return PetscError
(((MPI_Comm)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,76,PETSC_ERROR_INITIAL,"[khash] Assertion: `%s' failed.","ret>=0"
); } while(0);}; } } do { do { ; if (petscstack && petscstack
->currentsize > 0) { petscstack->currentsize--; petscstack
->function[petscstack->currentsize] = 0; petscstack->
file[petscstack->currentsize] = 0; petscstack->line[petscstack
->currentsize] = 0; petscstack->petscroutine[petscstack
->currentsize] = PETSC_FALSE; } if (petscstack) { petscstack
->hotdepth = (((petscstack->hotdepth-1)<(0)) ? (0) :
(petscstack->hotdepth-1)); } ; } while (0); return(0);} while
(0); } static inline __attribute((unused)) PetscErrorCode PetscHSetIClear
(PetscHSetI ht) { do { do { ; if (petscstack && (petscstack
->currentsize < 64)) { petscstack->function[petscstack
->currentsize] = __func__; petscstack->file[petscstack->
currentsize] = "/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
; petscstack->line[petscstack->currentsize] = 6; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0); do { if (!ht) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",1); if
(!PetscCheckPointer(ht,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",1);
} while (0); kh_clear_HSetI(ht); do { do { ; if (petscstack &&
petscstack->currentsize > 0) { petscstack->currentsize
--; petscstack->function[petscstack->currentsize] = 0; petscstack
->file[petscstack->currentsize] = 0; petscstack->line
[petscstack->currentsize] = 0; petscstack->petscroutine
[petscstack->currentsize] = PETSC_FALSE; } if (petscstack)
{ petscstack->hotdepth = (((petscstack->hotdepth-1)<
(0)) ? (0) : (petscstack->hotdepth-1)); } ; } while (0); return
(0);} while (0); } static inline __attribute((unused)) PetscErrorCode
PetscHSetIResize(PetscHSetI ht,PetscInt nb) { int ret; do { do
{ ; if (petscstack && (petscstack->currentsize <
64)) { petscstack->function[petscstack->currentsize] =
__func__; petscstack->file[petscstack->currentsize] = "/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
; petscstack->line[petscstack->currentsize] = 6; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0); do { if (!ht) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",1); if
(!PetscCheckPointer(ht,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",1);
} while (0); ret = kh_resize_HSetI(ht, (khint_t)nb); do { if
(__builtin_expect(!!(!(ret==0)),0)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,76,PETSC_ERROR_INITIAL,"[khash] Assertion: `%s' failed.","ret==0"
); } while(0); do { do { ; if (petscstack && petscstack
->currentsize > 0) { petscstack->currentsize--; petscstack
->function[petscstack->currentsize] = 0; petscstack->
file[petscstack->currentsize] = 0; petscstack->line[petscstack
->currentsize] = 0; petscstack->petscroutine[petscstack
->currentsize] = PETSC_FALSE; } if (petscstack) { petscstack
->hotdepth = (((petscstack->hotdepth-1)<(0)) ? (0) :
(petscstack->hotdepth-1)); } ; } while (0); return(0);} while
(0); } static inline __attribute((unused)) PetscErrorCode PetscHSetIGetSize
(PetscHSetI ht,PetscInt *n) { do { do { ; if (petscstack &&
(petscstack->currentsize < 64)) { petscstack->function
[petscstack->currentsize] = __func__; petscstack->file[
petscstack->currentsize] = "/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
; petscstack->line[petscstack->currentsize] = 6; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0); do { if (!ht) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",1); if
(!PetscCheckPointer(ht,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",1);
} while (0); do { if (!n) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",2); if
(!PetscCheckPointer(n,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,2); } while (0); *n = (PetscInt)((ht)->size); do { do { ;
if (petscstack && petscstack->currentsize > 0)
{ petscstack->currentsize--; petscstack->function[petscstack
->currentsize] = 0; petscstack->file[petscstack->currentsize
] = 0; petscstack->line[petscstack->currentsize] = 0; petscstack
->petscroutine[petscstack->currentsize] = PETSC_FALSE; }
if (petscstack) { petscstack->hotdepth = (((petscstack->
hotdepth-1)<(0)) ? (0) : (petscstack->hotdepth-1)); } ;
} while (0); return(0);} while (0); } static inline __attribute
((unused)) PetscErrorCode PetscHSetIHas(PetscHSetI ht,PetscInt
key,PetscBool *has) { khiter_t iter; do { do { ; if (petscstack
&& (petscstack->currentsize < 64)) { petscstack
->function[petscstack->currentsize] = __func__; petscstack
->file[petscstack->currentsize] = "/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
; petscstack->line[petscstack->currentsize] = 6; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_TRUE || petscstack->hotdepth); } ; } while (0);
; } while (0); do { if (!ht) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",1); if
(!PetscCheckPointer(ht,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",1);
} while (0); do { if (!has) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(has,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",3);
} while (0); iter = kh_get_HSetI(ht, key); *has = (iter != (
(ht)->n_buckets)) ? PETSC_TRUE : PETSC_FALSE; do { do { ; if
(petscstack && petscstack->currentsize > 0) { petscstack
->currentsize--; petscstack->function[petscstack->currentsize
] = 0; petscstack->file[petscstack->currentsize] = 0; petscstack
->line[petscstack->currentsize] = 0; petscstack->petscroutine
[petscstack->currentsize] = PETSC_FALSE; } if (petscstack)
{ petscstack->hotdepth = (((petscstack->hotdepth-1)<
(0)) ? (0) : (petscstack->hotdepth-1)); } ; } while (0); return
(0);} while (0); } static inline __attribute((unused)) PetscErrorCode
PetscHSetIAdd(PetscHSetI ht,PetscInt key) { int ret; khiter_t
iter; do { do { ; if (petscstack && (petscstack->
currentsize < 64)) { petscstack->function[petscstack->
currentsize] = __func__; petscstack->file[petscstack->currentsize
] = "/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
; petscstack->line[petscstack->currentsize] = 6; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_TRUE || petscstack->hotdepth); } ; } while (0);
; } while (0); do { if (!ht) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",1); if
(!PetscCheckPointer(ht,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",1);
} while (0); iter = kh_put_HSetI(ht, key, &ret); (void)iter
; do { if (__builtin_expect(!!(!(ret>=0)),0)) return PetscError
(((MPI_Comm)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,76,PETSC_ERROR_INITIAL,"[khash] Assertion: `%s' failed.","ret>=0"
); } while(0); do { do { ; if (petscstack && petscstack
->currentsize > 0) { petscstack->currentsize--; petscstack
->function[petscstack->currentsize] = 0; petscstack->
file[petscstack->currentsize] = 0; petscstack->line[petscstack
->currentsize] = 0; petscstack->petscroutine[petscstack
->currentsize] = PETSC_FALSE; } if (petscstack) { petscstack
->hotdepth = (((petscstack->hotdepth-1)<(0)) ? (0) :
(petscstack->hotdepth-1)); } ; } while (0); return(0);} while
(0); } static inline __attribute((unused)) PetscErrorCode PetscHSetIDel
(PetscHSetI ht,PetscInt key) { khiter_t iter; do { do { ; if (
petscstack && (petscstack->currentsize < 64)) {
petscstack->function[petscstack->currentsize] = __func__
; petscstack->file[petscstack->currentsize] = "/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
; petscstack->line[petscstack->currentsize] = 6; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_TRUE || petscstack->hotdepth); } ; } while (0);
; } while (0); do { if (!ht) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",1); if
(!PetscCheckPointer(ht,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",1);
} while (0); iter = kh_get_HSetI(ht, key); kh_del_HSetI(ht, iter
); do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0); } static
inline __attribute((unused)) PetscErrorCode PetscHSetIQueryAdd
(PetscHSetI ht,PetscInt key,PetscBool *missing) { int ret; khiter_t
iter; do { do { ; if (petscstack && (petscstack->
currentsize < 64)) { petscstack->function[petscstack->
currentsize] = __func__; petscstack->file[petscstack->currentsize
] = "/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
; petscstack->line[petscstack->currentsize] = 6; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_TRUE || petscstack->hotdepth); } ; } while (0);
; } while (0); do { if (!ht) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",1); if
(!PetscCheckPointer(ht,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",1);
} while (0); do { if (!missing) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(missing,PETSC_CHAR)) return PetscError((
(MPI_Comm)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",3);
} while (0); iter = kh_put_HSetI(ht, key, &ret); (void)iter
; do { if (__builtin_expect(!!(!(ret>=0)),0)) return PetscError
(((MPI_Comm)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,76,PETSC_ERROR_INITIAL,"[khash] Assertion: `%s' failed.","ret>=0"
); } while(0); *missing = ret ? PETSC_TRUE : PETSC_FALSE; do {
do { ; if (petscstack && petscstack->currentsize >
0) { petscstack->currentsize--; petscstack->function[petscstack
->currentsize] = 0; petscstack->file[petscstack->currentsize
] = 0; petscstack->line[petscstack->currentsize] = 0; petscstack
->petscroutine[petscstack->currentsize] = PETSC_FALSE; }
if (petscstack) { petscstack->hotdepth = (((petscstack->
hotdepth-1)<(0)) ? (0) : (petscstack->hotdepth-1)); } ;
} while (0); return(0);} while (0); } static inline __attribute
((unused)) PetscErrorCode PetscHSetIQueryDel(PetscHSetI ht,PetscInt
key,PetscBool *present) { khiter_t iter; do { do { ; if (petscstack
&& (petscstack->currentsize < 64)) { petscstack
->function[petscstack->currentsize] = __func__; petscstack
->file[petscstack->currentsize] = "/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
; petscstack->line[petscstack->currentsize] = 6; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_TRUE || petscstack->hotdepth); } ; } while (0);
; } while (0); do { if (!ht) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",1); if
(!PetscCheckPointer(ht,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",1);
} while (0); do { if (!present) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(present,PETSC_CHAR)) return PetscError((
(MPI_Comm)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",3);
} while (0); iter = kh_get_HSetI(ht, key); if (iter != ((ht)
->n_buckets)) { kh_del_HSetI(ht, iter); *present = PETSC_TRUE
; } else { *present = PETSC_FALSE; } do { do { ; if (petscstack
&& petscstack->currentsize > 0) { petscstack->
currentsize--; petscstack->function[petscstack->currentsize
] = 0; petscstack->file[petscstack->currentsize] = 0; petscstack
->line[petscstack->currentsize] = 0; petscstack->petscroutine
[petscstack->currentsize] = PETSC_FALSE; } if (petscstack)
{ petscstack->hotdepth = (((petscstack->hotdepth-1)<
(0)) ? (0) : (petscstack->hotdepth-1)); } ; } while (0); return
(0);} while (0); } static inline __attribute((unused)) PetscErrorCode
PetscHSetIGetElems(PetscHSetI ht,PetscInt *off,PetscInt array
[]) { PetscInt key; PetscInt pos; do { do { ; if (petscstack &&
(petscstack->currentsize < 64)) { petscstack->function
[petscstack->currentsize] = __func__; petscstack->file[
petscstack->currentsize] = "/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
; petscstack->line[petscstack->currentsize] = 6; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0); do { if (!ht) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",1); if
(!PetscCheckPointer(ht,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",1);
} while (0); do { if (!off) return PetscError(((MPI_Comm)0x44000001
),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",2); if
(!PetscCheckPointer(off,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),6,__func__,"/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashseti.h"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,2); } while (0); pos = *off; { khint_t __i; for (__i = (khint_t
)(0); __i != ((ht)->n_buckets); ++__i) { if (!(!(((ht)->
flags[(__i)>>4]>>(((__i)&0xfU)<<1))&
3))) continue; (key) = ((ht)->keys[__i]); array[pos++] = key
; } }; *off = pos; do { do { ; if (petscstack && petscstack
->currentsize > 0) { petscstack->currentsize--; petscstack
->function[petscstack->currentsize] = 0; petscstack->
file[petscstack->currentsize] = 0; petscstack->line[petscstack
->currentsize] = 0; petscstack->petscroutine[petscstack
->currentsize] = PETSC_FALSE; } if (petscstack) { petscstack
->hotdepth = (((petscstack->hotdepth-1)<(0)) ? (0) :
(petscstack->hotdepth-1)); } ; } while (0); return(0);} while
(0); }
Step 7, within the expansion of the macro 'PETSC_HASH_SET':
  a. Assuming 'petscstack' is null
  b. Assuming the condition is false
  c. Calling 'kh_init_HSetI'
  d. Returning from 'kh_init_HSetI'
  e. Assuming 'petscstack' is null
Step 8, within the expansion of the macro 'PETSC_HASH_SET':
  a. Null pointer value stored to field 'flags'
Step 23, within the expansion of the macro 'PETSC_HASH_SET':
  a. Assuming 'petscstack' is null
  b. Assuming 'ht' is non-null
  c. Assuming the condition is false
  d. Calling 'kh_put_HSetI'
Step 24, within the expansion of the macro 'PETSC_HASH_SET':
  a. Calling 'kh_resize_HSetI'
  b. Returning from 'kh_resize_HSetI'
  c. Calling 'PetscHashInt'
  d. Returning from 'PetscHashInt'
  e. Array access (via field 'flags') results in a null pointer dereference
Step 25, within the expansion of the macro 'PETSC_HASH_SET':
7
8#endif /* PETSC_HASHSETI_H */
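The path reported above is triggered by the most basic use of the generated set type: create a set, then insert one key. The sketch below (function name and key value are illustrative, not taken from the report) shows that call sequence using only the PetscHSetI helpers defined in this header; whether the dereference can occur at runtime depends on kh_resize_HSetI having allocated 'flags' before kh_put_HSetI indexes it, which the analyzer's path does not assume.

#include <petsc/private/hashseti.h>

static PetscErrorCode HSetIPathSketch(void)
{
  PetscHSetI     set;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscHSetICreate(&set);CHKERRQ(ierr); /* kh_init_HSetI(): zeroed table, flags == NULL, n_buckets == 0 */
  ierr = PetscHSetIAdd(set, 42);CHKERRQ(ierr); /* kh_put_HSetI(): kh_resize_HSetI(), then h->flags[i>>4] is read */
  ierr = PetscHSetIDestroy(&set);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}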

/sandbox/petsc/petsc.next-tmp/include/petsc/private/hashtable.h

1#if !defined(PETSC_HASHTABLE_H)
2#define PETSC_HASHTABLE_H
3
4#include <petsc/private/petscimpl.h>
5
6#define kh_inline PETSC_INLINE
7#define klib_unused PETSC_UNUSED
8#include <petsc/private/kernels/khash.h>
9
10/* Required for khash <= 0.2.5 */
11#if !defined(kcalloc)
12#define kcalloc(N,Z) calloc(N,Z)
13#endif
14#if !defined(kmalloc)
15#define kmalloc(Z) malloc(Z)
16#endif
17#if !defined(krealloc)
18#define krealloc(P,Z) realloc(P,Z)
19#endif
20#if !defined(kfree)
21#define kfree(P) free(P)
22#endif
23
24/* --- Useful extensions to khash --- */
25
26#if !defined(kh_reset)
27/*! @function
28 @abstract Reset a hash table to initial state.
29 @param name Name of the hash table [symbol]
30 @param h Pointer to the hash table [khash_t(name)*]
31 */
32#define kh_reset(name, h) { \
33  if (h) { \
34    kfree((h)->keys); kfree((h)->flags); \
35    kfree((h)->vals); \
36    memset((h), 0x00, sizeof(*(h))); \
37  } }
38#endif /*kh_reset*/
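kh_reset frees a table's keys/flags/vals arrays and zeroes the struct without freeing the struct itself, which is exactly what the expanded PetscHSetIReset earlier in this report does. A minimal hedged sketch (the wrapper name is illustrative):

static void HSetIResetSketch(PetscHSetI ht)
{
  /* release the internal arrays and zero the header; the kh_HSetI_t allocation itself is kept */
  kh_reset(HSetI, ht);
}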
39
40#if !defined(kh_foreach)
41/*! @function
42 @abstract Iterate over the entries in the hash table
43 @param h Pointer to the hash table [khash_t(name)*]
44 @param kvar Variable to which key will be assigned
45 @param vvar Variable to which value will be assigned
46 @param code Block of code to execute
47 */
48#define kh_foreach(h, kvar, vvar, code) { khint_t __i; \
49  for (__i = kh_begin(h); __i != kh_end(h); ++__i) { \
50    if (!kh_exist(h,__i)) continue; \
51    (kvar) = kh_key(h,__i); \
52    (vvar) = kh_val(h,__i); \
53    code; \
54  } }
55#endif /*kh_foreach*/
56
57#if !defined(kh_foreach_key)
58/*! @function
59 @abstract Iterate over the keys in the hash table
60 @param h Pointer to the hash table [khash_t(name)*]
61 @param kvar Variable to which key will be assigned
62 @param code Block of code to execute
63 */
64#define kh_foreach_key(h, kvar, code) { khint_t __i; \
65  for (__i = kh_begin(h); __i != kh_end(h); ++__i) { \
66    if (!kh_exist(h,__i)) continue; \
67    (kvar) = kh_key(h,__i); \
68    code; \
69  } }
70#endif /*kh_foreach_key*/
71
72#if !defined(kh_foreach_value)
73/*! @function
74 @abstract Iterate over the values in the hash table
75 @param h Pointer to the hash table [khash_t(name)*]
76 @param vvar Variable to which value will be assigned
77 @param code Block of code to execute
78 */
79#define kh_foreach_value(h, vvar, code) { khint_t __i; \
80  for (__i = kh_begin(h); __i != kh_end(h); ++__i) { \
81    if (!kh_exist(h,__i)) continue; \
82    (vvar) = kh_val(h,__i); \
83    code; \
84  } }
85#endif /*kh_foreach_value*/
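These iteration macros are what the expanded PetscHSetIGetElems shown earlier reduces to. As an illustration (the helper name is not part of the header), kh_foreach_key can gather all keys of a kh_HSetI_t into a caller-provided array:

static void HSetICollectKeysSketch(kh_HSetI_t *h, PetscInt array[], PetscInt *off)
{
  PetscInt key, pos = *off;

  /* visit every occupied bucket and append its key, mirroring PetscHSetIGetElems */
  kh_foreach_key(h, key, array[pos++] = key);
  *off = pos;
}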
86
87
88/* --- Helper macro for error checking --- */
89
90#if defined(PETSC_USE_DEBUG)
91#define PetscHashAssert(expr) do { \
92  if (PetscUnlikely(!(expr))) \
93    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB, \
94             "[khash] Assertion: `%s' failed.", \
95             PetscStringize(expr)); \
96} while(0)
97#else
98#define PetscHashAssert(expr) ((void)(expr))
99#endif
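PetscHashAssert converts khash's integer status codes into a PETSc error in debug builds and evaluates the expression silently otherwise. A hedged sketch of its typical use, mirroring the expanded PetscHSetIResize above (the function name is illustrative):

static PetscErrorCode HSetIGrowSketch(PetscHSetI ht, PetscInt nb)
{
  int ret;

  PetscFunctionBegin;
  ret = kh_resize_HSetI(ht, (khint_t)nb); /* khash reports allocation failure with a negative return */
  PetscHashAssert(ret == 0);
  PetscFunctionReturn(0);
}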
100
101
102/* --- Low level iterator API --- */
103
104typedef khiter_t PetscHashIter;
105
106#define PetscHashIterBegin(ht,i) do { \
107  (i) = kh_begin((ht)); \
108  if ((i) != kh_end((ht)) && !kh_exist((ht),(i))) \
109    PetscHashIterNext((ht),(i)); \
110} while (0)
111
112#define PetscHashIterNext(ht,i) \
113  do { ++(i); } while ((i) != kh_end((ht)) && !kh_exist((ht),(i)))
114
115#define PetscHashIterAtEnd(ht,i) ((i) == kh_end((ht)))
116
117#define PetscHashIterGetKey(ht,i,k) ((k) = kh_key((ht),(i)))
118
119#define PetscHashIterGetVal(ht,i,v) ((v) = kh_val((ht),(i)))
120
121#define PetscHashIterSetVal(ht,i,v) (kh_val((ht),(i)) = (v))
122
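The iterator macros are meant to be used together in a begin / at-end / next loop. A hedged sketch over the PetscHSetI type from the previous header (the printing helper is purely illustrative):

static PetscErrorCode HSetIPrintSketch(PetscHSetI ht)
{
  PetscHashIter  iter;
  PetscInt       key;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscHashIterBegin(ht, iter);
  while (!PetscHashIterAtEnd(ht, iter)) {
    PetscHashIterGetKey(ht, iter, key);
    ierr = PetscPrintf(PETSC_COMM_SELF, "key %D\n", key);CHKERRQ(ierr);
    PetscHashIterNext(ht, iter);
  }
  PetscFunctionReturn(0);
}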
123
124/* --- Thomas Wang integer hash functions --- */
125
126typedef khint32_t PetscHash32_t;
127typedef khint64_t PetscHash64_t;
128typedef khint_t PetscHash_t;
129
130/* Thomas Wang's first version for 32bit integers */
131PETSC_STATIC_INLINE PetscHash_t PetscHash_UInt32_v0(PetscHash32_t key)
132{
133 key += ~(key << 15);
134 key ^= (key >> 10);
135 key += (key << 3);
136 key ^= (key >> 6);
137 key += ~(key << 11);
138 key ^= (key >> 16);
139 return key;
140}
141
142/* Thomas Wang's second version for 32bit integers */
143PETSC_STATIC_INLINE PetscHash_t PetscHash_UInt32_v1(PetscHash32_t key)
144{
145 key = ~key + (key << 15); /* key = (key << 15) - key - 1; */
146 key = key ^ (key >> 12);
147 key = key + (key << 2);
148 key = key ^ (key >> 4);
149 key = key * 2057; /* key = (key + (key << 3)) + (key << 11); */
150 key = key ^ (key >> 16);
151 return key;
152}
153
154PETSC_STATIC_INLINE PetscHash_t PetscHash_UInt32(PetscHash32_t key)
155{
156 return PetscHash_UInt32_v1(key);
Step 27: Calling 'PetscHash_UInt32_v1'
Step 28: Returning from 'PetscHash_UInt32_v1'
157}
158
159/* Thomas Wang's version for 64bit integer -> 32bit hash */
160PETSC_STATIC_INLINE PetscHash32_t PetscHash_UInt64_32(PetscHash64_t key)
161{
162 key = ~key + (key << 18); /* key = (key << 18) - key - 1; */
163 key = key ^ (key >> 31);
164 key = key * 21; /* key = (key + (key << 2)) + (key << 4); */
165 key = key ^ (key >> 11);
166 key = key + (key << 6);
167 key = key ^ (key >> 22);
168 return (PetscHash32_t)key;
169}
170
171/* Thomas Wang's version for 64bit integer -> 64bit hash */
172PETSC_STATIC_INLINE PetscHash64_t PetscHash_UInt64_64(PetscHash64_t key)
173{
174 key = ~key + (key << 21); /* key = (key << 21) - key - 1; */
175 key = key ^ (key >> 24);
176 key = key * 265; /* key = (key + (key << 3)) + (key << 8); */
177 key = key ^ (key >> 14);
178 key = key * 21; /* key = (key + (key << 2)) + (key << 4); */
179 key = key ^ (key >> 28);
180 key = key + (key << 31);
181 return key;
182}
183
184PETSC_STATIC_INLINE PetscHash_t PetscHash_UInt64(PetscHash64_t key)
185{
186 return sizeof(PetscHash_t) < sizeof(PetscHash64_t)
187 ? (PetscHash_t)PetscHash_UInt64_32(key)
188 : (PetscHash_t)PetscHash_UInt64_64(key);
189}
190
191PETSC_STATIC_INLINE PetscHash_t PetscHashInt(PetscInt key)
192{
193#if defined(PETSC_USE_64BIT_INDICES)
194 return PetscHash_UInt64((PetscHash64_t)key);
195#else
196 return PetscHash_UInt32((PetscHash32_t)key);
Step 26: Calling 'PetscHash_UInt32'
Step 29: Returning from 'PetscHash_UInt32'
197#endif
198}
199
200PETSC_STATIC_INLINE PetscHash_t PetscHashCombine(PetscHash_t seed, PetscHash_t hash)
201{
202 /* https://doi.org/10.1002/asi.10170 */
203 /* https://dl.acm.org/citation.cfm?id=759509 */
204 return seed ^ (hash + (seed << 6) + (seed >> 2));
205}
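PetscHashCombine folds a new hash into a running seed using the formula from the references cited above, which is how a composite key such as an (i,j) index pair can be reduced to a single PetscHash_t. A small hedged sketch (the helper name is not part of the header):

static PetscHash_t HashPairSketch(PetscInt i, PetscInt j)
{
  /* hash each index with the Thomas Wang hash, then fold the two values together */
  return PetscHashCombine(PetscHashInt(i), PetscHashInt(j));
}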
206
207#define PetscHashEqual(a,b) ((a) == (b))
208
209
210#endif /* PETSC_HASHTABLE_H */