Actual source code: plexexodusii.c
1: #define PETSCDM_DLL
2: #include <petsc/private/dmpleximpl.h>
4: #if defined(PETSC_HAVE_EXODUSII)
5: #include <netcdf.h>
6: #include <exodusII.h>
7: #endif
9: #include <petsc/private/viewerimpl.h>
10: #include <petsc/private/viewerexodusiiimpl.h>
11: #if defined(PETSC_HAVE_EXODUSII)
12: /*@C
13: PETSC_VIEWER_EXODUSII_ - Creates a `PETSCVIEWEREXODUSII` `PetscViewer` shared by all processors in a communicator.
15: Collective
17: Input Parameter:
18: . comm - the MPI communicator to share the `PETSCVIEWEREXODUSII` `PetscViewer`
20: Level: intermediate
22: Note:
23: Unlike almost all other PETSc routines, PETSC_VIEWER_EXODUSII_ does not return
24: an error code. The ExodusII PetscViewer is usually used in the form
25: $ XXXView(XXX object, PETSC_VIEWER_EXODUSII_(comm));
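For instance, a DMPLEX mesh could be written with
$ DMView(dm, PETSC_VIEWER_EXODUSII_(PETSC_COMM_WORLD));
where dm is an illustrative DM variable and the output goes to the default file "mesh.exo".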
27: Fortran Note:
28: No support in Fortran
30: .seealso: `PETSCVIEWEREXODUSII`, `PetscViewer`, `PetscViewerExodusIIOpen()`, `PetscViewerType`, `PetscViewerCreate()`, `PetscViewerDestroy()`
31: @*/
32: PetscViewer PETSC_VIEWER_EXODUSII_(MPI_Comm comm)
33: {
34: PetscViewer viewer;
35: PetscErrorCode ierr;
37: PetscFunctionBegin;
38: ierr = PetscViewerExodusIIOpen(comm, "mesh.exo", FILE_MODE_WRITE, &viewer);
39: if (ierr) {
40: ierr = PetscError(PETSC_COMM_SELF, __LINE__, "PETSC_VIEWER_EXODUSII_", __FILE__, PETSC_ERR_PLIB, PETSC_ERROR_INITIAL, " ");
41: PetscFunctionReturn(NULL);
42: }
43: ierr = PetscObjectRegisterDestroy((PetscObject)viewer);
44: if (ierr) {
45: ierr = PetscError(PETSC_COMM_SELF, __LINE__, "PETSC_VIEWER_EXODUSII_", __FILE__, PETSC_ERR_PLIB, PETSC_ERROR_INITIAL, " ");
46: PetscFunctionReturn(NULL);
47: }
48: PetscFunctionReturn(viewer);
49: }
51: static PetscErrorCode PetscViewerView_ExodusII(PetscViewer v, PetscViewer viewer)
52: {
53: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)v->data;
55: PetscFunctionBegin;
56: if (exo->filename) PetscCall(PetscViewerASCIIPrintf(viewer, "Filename: %s\n", exo->filename));
57: if (exo->exoid) PetscCall(PetscViewerASCIIPrintf(viewer, "exoid: %d\n", exo->exoid));
58: if (exo->btype) PetscCall(PetscViewerASCIIPrintf(viewer, "IO Mode: %d\n", exo->btype));
59: if (exo->order) PetscCall(PetscViewerASCIIPrintf(viewer, "Mesh order: %" PetscInt_FMT "\n", exo->order));
60: PetscFunctionReturn(PETSC_SUCCESS);
61: }
63: static PetscErrorCode PetscViewerSetFromOptions_ExodusII(PetscViewer v, PetscOptionItems *PetscOptionsObject)
64: {
65: PetscFunctionBegin;
66: PetscOptionsHeadBegin(PetscOptionsObject, "ExodusII PetscViewer Options");
67: PetscOptionsHeadEnd();
68: PetscFunctionReturn(PETSC_SUCCESS);
69: }
71: static PetscErrorCode PetscViewerSetUp_ExodusII(PetscViewer viewer)
72: {
73: PetscFunctionBegin;
74: PetscFunctionReturn(PETSC_SUCCESS);
75: }
77: static PetscErrorCode PetscViewerDestroy_ExodusII(PetscViewer viewer)
78: {
79: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
81: PetscFunctionBegin;
82: if (exo->exoid >= 0) PetscCallExternal(ex_close, exo->exoid);
83: PetscCall(PetscFree(exo->filename));
84: PetscCall(PetscFree(exo));
85: PetscCall(PetscObjectComposeFunction((PetscObject)viewer, "PetscViewerFileSetName_C", NULL));
86: PetscCall(PetscObjectComposeFunction((PetscObject)viewer, "PetscViewerFileGetName_C", NULL));
87: PetscCall(PetscObjectComposeFunction((PetscObject)viewer, "PetscViewerFileSetMode_C", NULL));
88: PetscCall(PetscObjectComposeFunction((PetscObject)viewer, "PetscViewerFileGetMode_C", NULL));
89: PetscCall(PetscObjectComposeFunction((PetscObject)viewer, "PetscViewerGetId_C", NULL));
90: PetscCall(PetscObjectComposeFunction((PetscObject)viewer, "PetscViewerGetOrder_C", NULL));
91: PetscCall(PetscObjectComposeFunction((PetscObject)viewer, "PetscViewerSetOrder_C", NULL));
92: PetscFunctionReturn(PETSC_SUCCESS);
93: }
95: static PetscErrorCode PetscViewerFileSetName_ExodusII(PetscViewer viewer, const char name[])
96: {
97: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
98: PetscMPIInt rank;
99: int CPU_word_size, IO_word_size, EXO_mode;
100: MPI_Info mpi_info = MPI_INFO_NULL;
101: float EXO_version;
103: PetscFunctionBegin;
104: PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)viewer), &rank));
105: CPU_word_size = sizeof(PetscReal);
107: IO_word_size = sizeof(PetscReal);
108: if (exo->exoid >= 0) {
109: PetscCallExternal(ex_close, exo->exoid);
110: exo->exoid = -1;
111: }
112: if (exo->filename) PetscCall(PetscFree(exo->filename));
113: PetscCall(PetscStrallocpy(name, &exo->filename));
114: switch (exo->btype) {
115: case FILE_MODE_READ:
116: EXO_mode = EX_READ;
117: break;
118: case FILE_MODE_APPEND:
119: case FILE_MODE_UPDATE:
120: case FILE_MODE_APPEND_UPDATE:
121: /* Will fail if the file does not already exist */
122: EXO_mode = EX_WRITE;
123: break;
124: case FILE_MODE_WRITE:
125: /*
126: exodus only allows writing geometry upon file creation, so we will let DMView create the file.
127: */
128: PetscFunctionReturn(PETSC_SUCCESS);
129: break;
130: default:
131: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ORDER, "Must call PetscViewerFileSetMode() before PetscViewerFileSetName()");
132: }
133: #if defined(PETSC_USE_64BIT_INDICES)
134: EXO_mode += EX_ALL_INT64_API;
135: #endif
136: exo->exoid = ex_open_par(name, EXO_mode, &CPU_word_size, &IO_word_size, &EXO_version, PETSC_COMM_WORLD, mpi_info);
137: PetscCheck(exo->exoid >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open_par failed for %s", name);
138: PetscFunctionReturn(PETSC_SUCCESS);
139: }
141: static PetscErrorCode PetscViewerFileGetName_ExodusII(PetscViewer viewer, const char **name)
142: {
143: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
145: PetscFunctionBegin;
146: *name = exo->filename;
147: PetscFunctionReturn(PETSC_SUCCESS);
148: }
150: static PetscErrorCode PetscViewerFileSetMode_ExodusII(PetscViewer viewer, PetscFileMode type)
151: {
152: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
154: PetscFunctionBegin;
155: exo->btype = type;
156: PetscFunctionReturn(PETSC_SUCCESS);
157: }
159: static PetscErrorCode PetscViewerFileGetMode_ExodusII(PetscViewer viewer, PetscFileMode *type)
160: {
161: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
163: PetscFunctionBegin;
164: *type = exo->btype;
165: PetscFunctionReturn(PETSC_SUCCESS);
166: }
168: static PetscErrorCode PetscViewerExodusIIGetId_ExodusII(PetscViewer viewer, int *exoid)
169: {
170: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
172: PetscFunctionBegin;
173: *exoid = exo->exoid;
174: PetscFunctionReturn(PETSC_SUCCESS);
175: }
177: static PetscErrorCode PetscViewerExodusIIGetOrder_ExodusII(PetscViewer viewer, PetscInt *order)
178: {
179: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
181: PetscFunctionBegin;
182: *order = exo->order;
183: PetscFunctionReturn(PETSC_SUCCESS);
184: }
186: static PetscErrorCode PetscViewerExodusIISetOrder_ExodusII(PetscViewer viewer, PetscInt order)
187: {
188: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
190: PetscFunctionBegin;
191: exo->order = order;
192: PetscFunctionReturn(PETSC_SUCCESS);
193: }
195: /*MC
196: PETSCVIEWEREXODUSII - A viewer that writes to an Exodus II file
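A sketch of typical use, where comm, dm, and the file name "mesh.exo" are illustrative (these are the same calls made by `PetscViewerExodusIIOpen()`):
.vb
  PetscViewer viewer;
  PetscCall(PetscViewerCreate(comm, &viewer));
  PetscCall(PetscViewerSetType(viewer, PETSCVIEWEREXODUSII));
  PetscCall(PetscViewerFileSetMode(viewer, FILE_MODE_WRITE));
  PetscCall(PetscViewerFileSetName(viewer, "mesh.exo"));
  PetscCall(DMView(dm, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
.ve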
198: .seealso: `PetscViewerExodusIIOpen()`, `PetscViewerCreate()`, `PETSCVIEWERBINARY`, `PETSCVIEWERHDF5`, `DMView()`,
199: `PetscViewerFileSetName()`, `PetscViewerFileSetMode()`, `PetscViewerFormat`, `PetscViewerType`, `PetscViewerSetType()`
201: Level: beginner
202: M*/
204: PETSC_EXTERN PetscErrorCode PetscViewerCreate_ExodusII(PetscViewer v)
205: {
206: PetscViewer_ExodusII *exo;
208: PetscFunctionBegin;
209: PetscCall(PetscNew(&exo));
211: v->data = (void *)exo;
212: v->ops->destroy = PetscViewerDestroy_ExodusII;
213: v->ops->setfromoptions = PetscViewerSetFromOptions_ExodusII;
214: v->ops->setup = PetscViewerSetUp_ExodusII;
215: v->ops->view = PetscViewerView_ExodusII;
216: v->ops->flush = NULL;
217: exo->btype = (PetscFileMode)-1;
218: exo->filename = NULL;
219: exo->exoid = -1;
221: PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscViewerFileSetName_C", PetscViewerFileSetName_ExodusII));
222: PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscViewerFileGetName_C", PetscViewerFileGetName_ExodusII));
223: PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscViewerFileSetMode_C", PetscViewerFileSetMode_ExodusII));
224: PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscViewerFileGetMode_C", PetscViewerFileGetMode_ExodusII));
225: PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscViewerGetId_C", PetscViewerExodusIIGetId_ExodusII));
226: PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscViewerSetOrder_C", PetscViewerExodusIISetOrder_ExodusII));
227: PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscViewerGetOrder_C", PetscViewerExodusIIGetOrder_ExodusII));
228: PetscFunctionReturn(PETSC_SUCCESS);
229: }
231: /*
232: EXOGetVarIndex - Locate a result in an exodus file based on its name
234: Collective
236: Input Parameters:
237: + exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
238: . obj_type - the type of entity for instance EX_NODAL, EX_ELEM_BLOCK
239: - name - the name of the result
241: Output Parameters:
242: . varIndex - the location in the exodus file of the result
244: Notes:
245: The exodus variable index is obtained by comparing name with the
246: names of the variables of type obj_type declared in the exodus file. For instance, if name is "V",
247: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
248: amongst all variables of type obj_type.
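For example (a sketch, with illustrative variable names), if the file declares nodal variables "U_X" and "U_Y", then
.vb
  int vIdx;
  PetscCall(EXOGetVarIndex_Internal(exoid, EX_NODAL, "U", &vIdx));
.ve
sets vIdx to the index of "U_X"; vIdx is set to -1 when no variable matches. The comparison is case insensitive.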
250: Level: beginner
252: .seealso: `EXOGetVarIndex()`, `DMPlexView_ExodusII_Internal()`, `VecViewPlex_ExodusII_Nodal_Internal()`, `VecLoadNodal_PlexEXO()`, `VecLoadZonal_PlexEXO()`
253: */
254: PetscErrorCode EXOGetVarIndex_Internal(int exoid, ex_entity_type obj_type, const char name[], int *varIndex)
255: {
256: int num_vars, i, j;
257: char ext_name[MAX_STR_LENGTH + 1], var_name[MAX_STR_LENGTH + 1];
258: const int num_suffix = 5;
259: char *suffix[5];
260: PetscBool flg;
262: PetscFunctionBegin;
263: suffix[0] = (char *)"";
264: suffix[1] = (char *)"_X";
265: suffix[2] = (char *)"_XX";
266: suffix[3] = (char *)"_1";
267: suffix[4] = (char *)"_11";
269: *varIndex = -1;
270: PetscCallExternal(ex_get_variable_param, exoid, obj_type, &num_vars);
271: for (i = 0; i < num_vars; ++i) {
272: PetscCallExternal(ex_get_variable_name, exoid, obj_type, i + 1, var_name);
273: for (j = 0; j < num_suffix; ++j) {
274: PetscCall(PetscStrncpy(ext_name, name, MAX_STR_LENGTH));
275: PetscCall(PetscStrlcat(ext_name, suffix[j], MAX_STR_LENGTH));
276: PetscCall(PetscStrcasecmp(ext_name, var_name, &flg));
277: if (flg) *varIndex = i + 1;
278: }
279: }
280: PetscFunctionReturn(PETSC_SUCCESS);
281: }
283: /*
284: DMView_PlexExodusII - Write a DM to disk in exodus format
286: Collective on dm
288: Input Parameters:
289: + dm - The dm to be written
290: - viewer - an ExodusII viewer
292: Notes:
293: Not all DMs can be written to disk this way. For instance, ExodusII assumes that element blocks (mapped to "Cell Sets" labels)
294: consist of sequentially numbered cells. If this is not the case, the exodus file will be corrupted.
296: If the dm has been distributed, only the part of the DM on MPI rank 0 (including "ghost" cells and vertices)
297: will be written.
299: DMPlex only represents geometry, while most post-processing software expects that a mesh also provides information
300: on the discretization space. This function assumes that the file represents Lagrange finite elements of order 1 or 2.
301: The order of the mesh shall be set using PetscViewerExodusIISetOrder().
302: It should be extended to use PetscFE objects.
304: This function will only handle TRI, TET, QUAD, and HEX cells.
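A typical write sequence is sketched below (the file name "mesh.exo" is illustrative; the order must be set before calling DMView() for it to take effect):
.vb
  PetscViewer viewer;
  PetscCall(PetscViewerExodusIIOpen(PETSC_COMM_WORLD, "mesh.exo", FILE_MODE_WRITE, &viewer));
  PetscCall(PetscViewerExodusIISetOrder(viewer, 2));
  PetscCall(DMView(dm, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
.ve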
305: Level: beginner
307: .seealso:
308: */
309: PetscErrorCode DMView_PlexExodusII(DM dm, PetscViewer viewer)
310: {
311: enum ElemType {
312: TRI,
313: QUAD,
314: TET,
315: HEX
316: };
317: MPI_Comm comm;
318: PetscInt degree; /* the order of the mesh */
319: /* Connectivity Variables */
320: PetscInt cellsNotInConnectivity;
321: /* Cell Sets */
322: DMLabel csLabel;
323: IS csIS;
324: const PetscInt *csIdx;
325: PetscInt num_cs, cs;
326: enum ElemType *type;
327: PetscBool hasLabel;
328: /* Coordinate Variables */
329: DM cdm;
330: PetscSection coordSection;
331: Vec coord;
332: PetscInt **nodes;
333: PetscInt depth, d, dim, skipCells = 0;
334: PetscInt pStart, pEnd, p, cStart, cEnd, numCells, vStart, vEnd, numVertices, eStart, eEnd, numEdges, fStart, fEnd, numFaces, numNodes;
335: PetscInt num_vs, num_fs;
336: PetscMPIInt rank, size;
337: const char *dmName;
338: PetscInt nodesTriP1[4] = {3, 0, 0, 0};
339: PetscInt nodesTriP2[4] = {3, 3, 0, 0};
340: PetscInt nodesQuadP1[4] = {4, 0, 0, 0};
341: PetscInt nodesQuadP2[4] = {4, 4, 0, 1};
342: PetscInt nodesTetP1[4] = {4, 0, 0, 0};
343: PetscInt nodesTetP2[4] = {4, 6, 0, 0};
344: PetscInt nodesHexP1[4] = {8, 0, 0, 0};
345: PetscInt nodesHexP2[4] = {8, 12, 6, 1};
346: int CPU_word_size, IO_word_size, EXO_mode;
347: float EXO_version;
349: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *)viewer->data;
351: PetscFunctionBegin;
352: PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
353: PetscCallMPI(MPI_Comm_rank(comm, &rank));
354: PetscCallMPI(MPI_Comm_size(comm, &size));
356: /*
357: Creating coordSection is a collective operation so we do it somewhat out of sequence
358: */
359: PetscCall(PetscSectionCreate(comm, &coordSection));
360: PetscCall(DMGetCoordinatesLocalSetUp(dm));
361: /*
362: Check that all points are on rank 0 since we don't know how to save distributed DM in exodus format
363: */
364: PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
365: PetscCall(DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd));
366: PetscCall(DMPlexGetDepthStratum(dm, 1, &eStart, &eEnd));
367: PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
368: numCells = cEnd - cStart;
369: numEdges = eEnd - eStart;
370: numVertices = vEnd - vStart;
371: PetscCheck(!(rank && (numCells || numEdges || numVertices)), PETSC_COMM_SELF, PETSC_ERR_SUP, "Writing distributed DM in exodusII format not supported");
372: if (rank == 0) {
373: switch (exo->btype) {
374: case FILE_MODE_READ:
375: case FILE_MODE_APPEND:
376: case FILE_MODE_UPDATE:
377: case FILE_MODE_APPEND_UPDATE:
378: /* exodusII does not allow writing geometry to an existing file */
379: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "cannot add geometry to existing file %s", exo->filename);
380: case FILE_MODE_WRITE:
381: /* Create a new file, overwriting any existing one (EX_CLOBBER) */
382: EXO_mode = EX_CLOBBER;
383: #if defined(PETSC_USE_64BIT_INDICES)
384: EXO_mode += EX_ALL_INT64_API;
385: #endif
386: CPU_word_size = sizeof(PetscReal);
387: IO_word_size = sizeof(PetscReal);
388: exo->exoid = ex_create(exo->filename, EXO_mode, &CPU_word_size, &IO_word_size);
389: PetscCheck(exo->exoid >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_create failed for %s", exo->filename);
391: break;
392: default:
393: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ORDER, "Must call PetscViewerFileSetMode() before PetscViewerFileSetName()");
394: }
396: /* --- Get DM info --- */
397: PetscCall(PetscObjectGetName((PetscObject)dm, &dmName));
398: PetscCall(DMPlexGetDepth(dm, &depth));
399: PetscCall(DMGetDimension(dm, &dim));
400: PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
401: if (depth == 3) {
402: numFaces = fEnd - fStart;
403: } else {
404: numFaces = 0;
405: }
406: PetscCall(DMGetLabelSize(dm, "Cell Sets", &num_cs));
407: PetscCall(DMGetLabelSize(dm, "Vertex Sets", &num_vs));
408: PetscCall(DMGetLabelSize(dm, "Face Sets", &num_fs));
409: PetscCall(DMGetCoordinatesLocal(dm, &coord));
410: PetscCall(DMGetCoordinateDM(dm, &cdm));
411: if (num_cs > 0) {
412: PetscCall(DMGetLabel(dm, "Cell Sets", &csLabel));
413: PetscCall(DMLabelGetValueIS(csLabel, &csIS));
414: PetscCall(ISGetIndices(csIS, &csIdx));
415: }
416: PetscCall(PetscMalloc1(num_cs, &nodes));
417: /* Set element type for each block and compute total number of nodes */
418: PetscCall(PetscMalloc1(num_cs, &type));
419: numNodes = numVertices;
421: PetscCall(PetscViewerExodusIIGetOrder(viewer, &degree));
422: if (degree == 2) numNodes += numEdges;
423: cellsNotInConnectivity = numCells;
424: for (cs = 0; cs < num_cs; ++cs) {
425: IS stratumIS;
426: const PetscInt *cells;
427: PetscScalar *xyz = NULL;
428: PetscInt csSize, closureSize;
430: PetscCall(DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS));
431: PetscCall(ISGetIndices(stratumIS, &cells));
432: PetscCall(ISGetSize(stratumIS, &csSize));
433: PetscCall(DMPlexVecGetClosure(cdm, NULL, coord, cells[0], &closureSize, &xyz));
434: switch (dim) {
435: case 2:
436: if (closureSize == 3 * dim) {
437: type[cs] = TRI;
438: } else if (closureSize == 4 * dim) {
439: type[cs] = QUAD;
440: } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices %" PetscInt_FMT " in dimension %" PetscInt_FMT " has no ExodusII type", closureSize / dim, dim);
441: break;
442: case 3:
443: if (closureSize == 4 * dim) {
444: type[cs] = TET;
445: } else if (closureSize == 8 * dim) {
446: type[cs] = HEX;
447: } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices %" PetscInt_FMT " in dimension %" PetscInt_FMT " has no ExodusII type", closureSize / dim, dim);
448: break;
449: default:
450: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Dimension %" PetscInt_FMT " not handled by ExodusII viewer", dim);
451: }
452: if ((degree == 2) && (type[cs] == QUAD)) numNodes += csSize;
453: if ((degree == 2) && (type[cs] == HEX)) {
454: numNodes += csSize;
455: numNodes += numFaces;
456: }
457: PetscCall(DMPlexVecRestoreClosure(cdm, NULL, coord, cells[0], &closureSize, &xyz));
458: /* Set nodes and Element type */
459: if (type[cs] == TRI) {
460: if (degree == 1) nodes[cs] = nodesTriP1;
461: else if (degree == 2) nodes[cs] = nodesTriP2;
462: } else if (type[cs] == QUAD) {
463: if (degree == 1) nodes[cs] = nodesQuadP1;
464: else if (degree == 2) nodes[cs] = nodesQuadP2;
465: } else if (type[cs] == TET) {
466: if (degree == 1) nodes[cs] = nodesTetP1;
467: else if (degree == 2) nodes[cs] = nodesTetP2;
468: } else if (type[cs] == HEX) {
469: if (degree == 1) nodes[cs] = nodesHexP1;
470: else if (degree == 2) nodes[cs] = nodesHexP2;
471: }
472: /* Compute the number of cells not in the connectivity table */
473: cellsNotInConnectivity -= nodes[cs][3] * csSize;
475: PetscCall(ISRestoreIndices(stratumIS, &cells));
476: PetscCall(ISDestroy(&stratumIS));
477: }
478: if (num_cs) PetscCallExternal(ex_put_init, exo->exoid, dmName, dim, numNodes, numCells, num_cs, num_vs, num_fs);
479: /* --- Connectivity --- */
480: for (cs = 0; cs < num_cs; ++cs) {
481: IS stratumIS;
482: const PetscInt *cells;
483: PetscInt *connect, off = 0;
484: PetscInt edgesInClosure = 0, facesInClosure = 0, verticesInClosure = 0;
485: PetscInt csSize, c, connectSize, closureSize;
486: char *elem_type = NULL;
487: char elem_type_tri3[] = "TRI3", elem_type_quad4[] = "QUAD4";
488: char elem_type_tri6[] = "TRI6", elem_type_quad9[] = "QUAD9";
489: char elem_type_tet4[] = "TET4", elem_type_hex8[] = "HEX8";
490: char elem_type_tet10[] = "TET10", elem_type_hex27[] = "HEX27";
492: PetscCall(DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS));
493: PetscCall(ISGetIndices(stratumIS, &cells));
494: PetscCall(ISGetSize(stratumIS, &csSize));
495: /* Set Element type */
496: if (type[cs] == TRI) {
497: if (degree == 1) elem_type = elem_type_tri3;
498: else if (degree == 2) elem_type = elem_type_tri6;
499: } else if (type[cs] == QUAD) {
500: if (degree == 1) elem_type = elem_type_quad4;
501: else if (degree == 2) elem_type = elem_type_quad9;
502: } else if (type[cs] == TET) {
503: if (degree == 1) elem_type = elem_type_tet4;
504: else if (degree == 2) elem_type = elem_type_tet10;
505: } else if (type[cs] == HEX) {
506: if (degree == 1) elem_type = elem_type_hex8;
507: else if (degree == 2) elem_type = elem_type_hex27;
508: }
509: connectSize = nodes[cs][0] + nodes[cs][1] + nodes[cs][2] + nodes[cs][3];
510: PetscCall(PetscMalloc1(PetscMax(27, connectSize) * csSize, &connect));
511: PetscCallExternal(ex_put_block, exo->exoid, EX_ELEM_BLOCK, csIdx[cs], elem_type, csSize, connectSize, 0, 0, 1);
512: /* Find number of vertices, edges, and faces in the closure */
513: verticesInClosure = nodes[cs][0];
514: if (depth > 1) {
515: if (dim == 2) {
516: PetscCall(DMPlexGetConeSize(dm, cells[0], &edgesInClosure));
517: } else if (dim == 3) {
518: PetscInt *closure = NULL;
520: PetscCall(DMPlexGetConeSize(dm, cells[0], &facesInClosure));
521: PetscCall(DMPlexGetTransitiveClosure(dm, cells[0], PETSC_TRUE, &closureSize, &closure));
522: edgesInClosure = closureSize - facesInClosure - 1 - verticesInClosure;
523: PetscCall(DMPlexRestoreTransitiveClosure(dm, cells[0], PETSC_TRUE, &closureSize, &closure));
524: }
525: }
526: /* Get connectivity for each cell */
527: for (c = 0; c < csSize; ++c) {
528: PetscInt *closure = NULL;
529: PetscInt temp, i;
531: PetscCall(DMPlexGetTransitiveClosure(dm, cells[c], PETSC_TRUE, &closureSize, &closure));
532: for (i = 0; i < connectSize; ++i) {
533: if (i < nodes[cs][0]) { /* Vertices */
534: connect[i + off] = closure[(i + edgesInClosure + facesInClosure + 1) * 2] + 1;
535: connect[i + off] -= cellsNotInConnectivity;
536: } else if (i < nodes[cs][0] + nodes[cs][1]) { /* Edges */
537: connect[i + off] = closure[(i - verticesInClosure + facesInClosure + 1) * 2] + 1;
538: if (nodes[cs][2] == 0) connect[i + off] -= numFaces;
539: connect[i + off] -= cellsNotInConnectivity;
540: } else if (i < nodes[cs][0] + nodes[cs][1] + nodes[cs][3]) { /* Cells */
541: connect[i + off] = closure[0] + 1;
542: connect[i + off] -= skipCells;
543: } else if (i < nodes[cs][0] + nodes[cs][1] + nodes[cs][3] + nodes[cs][2]) { /* Faces */
544: connect[i + off] = closure[(i - edgesInClosure - verticesInClosure) * 2] + 1;
545: connect[i + off] -= cellsNotInConnectivity;
546: } else {
547: connect[i + off] = -1;
548: }
549: }
550: /* Tetrahedra are inverted */
551: if (type[cs] == TET) {
552: temp = connect[0 + off];
553: connect[0 + off] = connect[1 + off];
554: connect[1 + off] = temp;
555: if (degree == 2) {
556: temp = connect[5 + off];
557: connect[5 + off] = connect[6 + off];
558: connect[6 + off] = temp;
559: temp = connect[7 + off];
560: connect[7 + off] = connect[8 + off];
561: connect[8 + off] = temp;
562: }
563: }
564: /* Hexahedra are inverted */
565: if (type[cs] == HEX) {
566: temp = connect[1 + off];
567: connect[1 + off] = connect[3 + off];
568: connect[3 + off] = temp;
569: if (degree == 2) {
570: temp = connect[8 + off];
571: connect[8 + off] = connect[11 + off];
572: connect[11 + off] = temp;
573: temp = connect[9 + off];
574: connect[9 + off] = connect[10 + off];
575: connect[10 + off] = temp;
576: temp = connect[16 + off];
577: connect[16 + off] = connect[17 + off];
578: connect[17 + off] = temp;
579: temp = connect[18 + off];
580: connect[18 + off] = connect[19 + off];
581: connect[19 + off] = temp;
583: temp = connect[12 + off];
584: connect[12 + off] = connect[16 + off];
585: connect[16 + off] = temp;
586: temp = connect[13 + off];
587: connect[13 + off] = connect[17 + off];
588: connect[17 + off] = temp;
589: temp = connect[14 + off];
590: connect[14 + off] = connect[18 + off];
591: connect[18 + off] = temp;
592: temp = connect[15 + off];
593: connect[15 + off] = connect[19 + off];
594: connect[19 + off] = temp;
596: temp = connect[23 + off];
597: connect[23 + off] = connect[26 + off];
598: connect[26 + off] = temp;
599: temp = connect[24 + off];
600: connect[24 + off] = connect[25 + off];
601: connect[25 + off] = temp;
602: temp = connect[25 + off];
603: connect[25 + off] = connect[26 + off];
604: connect[26 + off] = temp;
605: }
606: }
607: off += connectSize;
608: PetscCall(DMPlexRestoreTransitiveClosure(dm, cells[c], PETSC_TRUE, &closureSize, &closure));
609: }
610: PetscCallExternal(ex_put_conn, exo->exoid, EX_ELEM_BLOCK, csIdx[cs], connect, 0, 0);
611: skipCells += (nodes[cs][3] == 0) * csSize;
612: PetscCall(PetscFree(connect));
613: PetscCall(ISRestoreIndices(stratumIS, &cells));
614: PetscCall(ISDestroy(&stratumIS));
615: }
616: PetscCall(PetscFree(type));
617: /* --- Coordinates --- */
618: PetscCall(PetscSectionSetChart(coordSection, pStart, pEnd));
619: if (num_cs) {
620: for (d = 0; d < depth; ++d) {
621: PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
622: for (p = pStart; p < pEnd; ++p) PetscCall(PetscSectionSetDof(coordSection, p, nodes[0][d] > 0));
623: }
624: }
625: for (cs = 0; cs < num_cs; ++cs) {
626: IS stratumIS;
627: const PetscInt *cells;
628: PetscInt csSize, c;
630: PetscCall(DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS));
631: PetscCall(ISGetIndices(stratumIS, &cells));
632: PetscCall(ISGetSize(stratumIS, &csSize));
633: for (c = 0; c < csSize; ++c) PetscCall(PetscSectionSetDof(coordSection, cells[c], nodes[cs][3] > 0));
634: PetscCall(ISRestoreIndices(stratumIS, &cells));
635: PetscCall(ISDestroy(&stratumIS));
636: }
637: if (num_cs) {
638: PetscCall(ISRestoreIndices(csIS, &csIdx));
639: PetscCall(ISDestroy(&csIS));
640: }
641: PetscCall(PetscFree(nodes));
642: PetscCall(PetscSectionSetUp(coordSection));
643: if (numNodes) {
644: const char *coordNames[3] = {"x", "y", "z"};
645: PetscScalar *closure, *cval;
646: PetscReal *coords;
647: PetscInt hasDof, n = 0;
649: /* There can't be more than 24 values in the closure of a point for the coordinate section */
650: PetscCall(PetscCalloc3(numNodes * 3, &coords, dim, &cval, 24, &closure));
651: PetscCall(DMGetCoordinatesLocalNoncollective(dm, &coord));
652: PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
653: for (p = pStart; p < pEnd; ++p) {
654: PetscCall(PetscSectionGetDof(coordSection, p, &hasDof));
655: if (hasDof) {
656: PetscInt closureSize = 24, j;
658: PetscCall(DMPlexVecGetClosure(cdm, NULL, coord, p, &closureSize, &closure));
659: for (d = 0; d < dim; ++d) {
660: cval[d] = 0.0;
661: for (j = 0; j < closureSize / dim; j++) cval[d] += closure[j * dim + d];
662: coords[d * numNodes + n] = PetscRealPart(cval[d]) * dim / closureSize;
663: }
664: ++n;
665: }
666: }
667: PetscCallExternal(ex_put_coord, exo->exoid, &coords[0 * numNodes], &coords[1 * numNodes], &coords[2 * numNodes]);
668: PetscCall(PetscFree3(coords, cval, closure));
669: PetscCallExternal(ex_put_coord_names, exo->exoid, (char **)coordNames);
670: }
672: /* --- Node Sets/Vertex Sets --- */
673: PetscCall(DMHasLabel(dm, "Vertex Sets", &hasLabel));
674: if (hasLabel) {
675: PetscInt i, vs, vsSize;
676: const PetscInt *vsIdx, *vertices;
677: PetscInt *nodeList;
678: IS vsIS, stratumIS;
679: DMLabel vsLabel;
680: PetscCall(DMGetLabel(dm, "Vertex Sets", &vsLabel));
681: PetscCall(DMLabelGetValueIS(vsLabel, &vsIS));
682: PetscCall(ISGetIndices(vsIS, &vsIdx));
683: for (vs = 0; vs < num_vs; ++vs) {
684: PetscCall(DMLabelGetStratumIS(vsLabel, vsIdx[vs], &stratumIS));
685: PetscCall(ISGetIndices(stratumIS, &vertices));
686: PetscCall(ISGetSize(stratumIS, &vsSize));
687: PetscCall(PetscMalloc1(vsSize, &nodeList));
688: for (i = 0; i < vsSize; ++i) nodeList[i] = vertices[i] - skipCells + 1;
689: PetscCallExternal(ex_put_set_param, exo->exoid, EX_NODE_SET, vsIdx[vs], vsSize, 0);
690: PetscCallExternal(ex_put_set, exo->exoid, EX_NODE_SET, vsIdx[vs], nodeList, NULL);
691: PetscCall(ISRestoreIndices(stratumIS, &vertices));
692: PetscCall(ISDestroy(&stratumIS));
693: PetscCall(PetscFree(nodeList));
694: }
695: PetscCall(ISRestoreIndices(vsIS, &vsIdx));
696: PetscCall(ISDestroy(&vsIS));
697: }
698: /* --- Side Sets/Face Sets --- */
699: PetscCall(DMHasLabel(dm, "Face Sets", &hasLabel));
700: if (hasLabel) {
701: PetscInt i, j, fs, fsSize;
702: const PetscInt *fsIdx, *faces;
703: IS fsIS, stratumIS;
704: DMLabel fsLabel;
705: PetscInt numPoints, *points;
706: PetscInt elem_list_size = 0;
707: PetscInt *elem_list, *elem_ind, *side_list;
709: PetscCall(DMGetLabel(dm, "Face Sets", &fsLabel));
710: /* Compute size of Node List and Element List */
711: PetscCall(DMLabelGetValueIS(fsLabel, &fsIS));
712: PetscCall(ISGetIndices(fsIS, &fsIdx));
713: for (fs = 0; fs < num_fs; ++fs) {
714: PetscCall(DMLabelGetStratumIS(fsLabel, fsIdx[fs], &stratumIS));
715: PetscCall(ISGetSize(stratumIS, &fsSize));
716: elem_list_size += fsSize;
717: PetscCall(ISDestroy(&stratumIS));
718: }
719: if (num_fs) {
720: PetscCall(PetscMalloc3(num_fs, &elem_ind, elem_list_size, &elem_list, elem_list_size, &side_list));
721: elem_ind[0] = 0;
722: for (fs = 0; fs < num_fs; ++fs) {
723: PetscCall(DMLabelGetStratumIS(fsLabel, fsIdx[fs], &stratumIS));
724: PetscCall(ISGetIndices(stratumIS, &faces));
725: PetscCall(ISGetSize(stratumIS, &fsSize));
726: /* Set Parameters */
727: PetscCallExternal(ex_put_set_param, exo->exoid, EX_SIDE_SET, fsIdx[fs], fsSize, 0);
728: /* Indices */
729: if (fs < num_fs - 1) elem_ind[fs + 1] = elem_ind[fs] + fsSize;
731: for (i = 0; i < fsSize; ++i) {
732: /* Element List */
733: points = NULL;
734: PetscCall(DMPlexGetTransitiveClosure(dm, faces[i], PETSC_FALSE, &numPoints, &points));
735: elem_list[elem_ind[fs] + i] = points[2] + 1;
736: PetscCall(DMPlexRestoreTransitiveClosure(dm, faces[i], PETSC_FALSE, &numPoints, &points));
738: /* Side List */
739: points = NULL;
740: PetscCall(DMPlexGetTransitiveClosure(dm, elem_list[elem_ind[fs] + i] - 1, PETSC_TRUE, &numPoints, &points));
741: for (j = 1; j < numPoints; ++j) {
742: if (points[j * 2] == faces[i]) break;
743: }
744: /* Convert HEX sides */
745: if (numPoints == 27) {
746: if (j == 1) {
747: j = 5;
748: } else if (j == 2) {
749: j = 6;
750: } else if (j == 3) {
751: j = 1;
752: } else if (j == 4) {
753: j = 3;
754: } else if (j == 5) {
755: j = 2;
756: } else if (j == 6) {
757: j = 4;
758: }
759: }
760: /* Convert TET sides */
761: if (numPoints == 15) {
762: --j;
763: if (j == 0) j = 4;
764: }
765: side_list[elem_ind[fs] + i] = j;
766: PetscCall(DMPlexRestoreTransitiveClosure(dm, elem_list[elem_ind[fs] + i] - 1, PETSC_TRUE, &numPoints, &points));
767: }
768: PetscCall(ISRestoreIndices(stratumIS, &faces));
769: PetscCall(ISDestroy(&stratumIS));
770: }
771: PetscCall(ISRestoreIndices(fsIS, &fsIdx));
772: PetscCall(ISDestroy(&fsIS));
774: /* Put side sets */
775: for (fs = 0; fs < num_fs; ++fs) PetscCallExternal(ex_put_set, exo->exoid, EX_SIDE_SET, fsIdx[fs], &elem_list[elem_ind[fs]], &side_list[elem_ind[fs]]);
776: PetscCall(PetscFree3(elem_ind, elem_list, side_list));
777: }
778: }
779: /*
780: close the exodus file
781: */
782: PetscCallExternal(ex_close, exo->exoid);
783: exo->exoid = -1;
784: }
785: PetscCall(PetscSectionDestroy(&coordSection));
787: /*
788: reopen the file in parallel
789: */
790: EXO_mode = EX_WRITE;
791: #if defined(PETSC_USE_64BIT_INDICES)
792: EXO_mode += EX_ALL_INT64_API;
793: #endif
794: CPU_word_size = sizeof(PetscReal);
795: IO_word_size = sizeof(PetscReal);
796: exo->exoid = ex_open_par(exo->filename, EXO_mode, &CPU_word_size, &IO_word_size, &EXO_version, comm, MPI_INFO_NULL);
797: PetscCheck(exo->exoid >= 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open_par failed for %s", exo->filename);
798: PetscFunctionReturn(PETSC_SUCCESS);
799: }
801: /*
802: VecView_PlexExodusII_Internal - Write a Vec to an exodus file
804: Collective on v
806: Input Parameters:
807: + v - The vector to be written
808: - viewer - The PetscViewerExodusII viewer associated with an exodus file
810: Notes:
811: The exodus result variable index is obtained by comparing the Vec name and the
812: names of variables declared in the exodus file. For instance for a Vec named "V"
813: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
814: amongst all variables.
815: In the event that both a nodal and a zonal variable match, the function returns an error instead of
816: possibly corrupting the file.
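A sketch of the usual calling sequence (the vector name "V", step, and time are illustrative; this function is normally reached through VecView() on a Vec obtained from a DM that was written to the same file):
.vb
  PetscCall(PetscObjectSetName((PetscObject)v, "V"));
  PetscCall(DMSetOutputSequenceNumber(dm, step, time));
  PetscCall(VecView(v, viewer));
.ve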
818: Level: beginner
820: .seealso: `EXOGetVarIndex_Internal()`, `DMPlexView_ExodusII()`, `VecView_PlexExodusII()`
821: */
822: PetscErrorCode VecView_PlexExodusII_Internal(Vec v, PetscViewer viewer)
823: {
824: DM dm;
825: MPI_Comm comm;
826: PetscMPIInt rank;
828: int exoid, offsetN = 0, offsetZ = 0;
829: const char *vecname;
830: PetscInt step;
832: PetscFunctionBegin;
833: PetscCall(PetscObjectGetComm((PetscObject)v, &comm));
834: PetscCallMPI(MPI_Comm_rank(comm, &rank));
835: PetscCall(PetscViewerExodusIIGetId(viewer, &exoid));
836: PetscCall(VecGetDM(v, &dm));
837: PetscCall(PetscObjectGetName((PetscObject)v, &vecname));
839: PetscCall(DMGetOutputSequenceNumber(dm, &step, NULL));
840: PetscCall(EXOGetVarIndex_Internal(exoid, EX_NODAL, vecname, &offsetN));
841: PetscCall(EXOGetVarIndex_Internal(exoid, EX_ELEM_BLOCK, vecname, &offsetZ));
842: PetscCheck(!(offsetN > 0 && offsetZ > 0), comm, PETSC_ERR_FILE_UNEXPECTED, "Found both nodal and zonal variable %s in exodus file. ", vecname);
843: if (offsetN > 0) {
844: PetscCall(VecViewPlex_ExodusII_Nodal_Internal(v, exoid, (int)step + 1, offsetN));
845: } else if (offsetZ > 0) {
846: PetscCall(VecViewPlex_ExodusII_Zonal_Internal(v, exoid, (int)step + 1, offsetZ));
847: } else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Could not find nodal or zonal variable %s in exodus file. ", vecname);
848: PetscFunctionReturn(PETSC_SUCCESS);
849: }
851: /*
852: VecLoad_PlexExodusII_Internal - Read a Vec from an exodus file
854: Collective on v
856: Input Parameters:
857: + v - The vector to be read
858: - viewer - The PetscViewerExodusII viewer associated with an exodus file
860: Notes:
861: The exodus result variable index is obtained by comparing the Vec name and the
862: names of variables declared in the exodus file. For instance for a Vec named "V"
863: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
864: amongst all variables.
865: In the event that both a nodal and a zonal variable match, the function returns an error instead of
866: possibly corrupting the file.
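A sketch of the corresponding read sequence (names are illustrative; on more than one rank the natural ordering used here requires DMSetUseNatural() to have been called before the mesh was distributed):
.vb
  PetscCall(PetscObjectSetName((PetscObject)v, "V"));
  PetscCall(DMSetOutputSequenceNumber(dm, step, time));
  PetscCall(VecLoad(v, viewer));
.ve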
868: Level: beginner
870: .seealso: `EXOGetVarIndex_Internal()`, `DMPlexView_ExodusII()`, `VecView_PlexExodusII()`
871: */
872: PetscErrorCode VecLoad_PlexExodusII_Internal(Vec v, PetscViewer viewer)
873: {
874: DM dm;
875: MPI_Comm comm;
876: PetscMPIInt rank;
878: int exoid, offsetN = 0, offsetZ = 0;
879: const char *vecname;
880: PetscInt step;
882: PetscFunctionBegin;
883: PetscCall(PetscObjectGetComm((PetscObject)v, &comm));
884: PetscCallMPI(MPI_Comm_rank(comm, &rank));
885: PetscCall(PetscViewerExodusIIGetId(viewer, &exoid));
886: PetscCall(VecGetDM(v, &dm));
887: PetscCall(PetscObjectGetName((PetscObject)v, &vecname));
889: PetscCall(DMGetOutputSequenceNumber(dm, &step, NULL));
890: PetscCall(EXOGetVarIndex_Internal(exoid, EX_NODAL, vecname, &offsetN));
891: PetscCall(EXOGetVarIndex_Internal(exoid, EX_ELEM_BLOCK, vecname, &offsetZ));
892: PetscCheck(!(offsetN > 0 && offsetZ > 0), comm, PETSC_ERR_FILE_UNEXPECTED, "Found both nodal and zonal variable %s in exodus file. ", vecname);
893: if (offsetN > 0) PetscCall(VecLoadPlex_ExodusII_Nodal_Internal(v, exoid, (int)step + 1, offsetN));
894: else if (offsetZ > 0) PetscCall(VecLoadPlex_ExodusII_Zonal_Internal(v, exoid, (int)step + 1, offsetZ));
895: else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Could not find nodal or zonal variable %s in exodus file. ", vecname);
896: PetscFunctionReturn(PETSC_SUCCESS);
897: }
899: /*
900: VecViewPlex_ExodusII_Nodal_Internal - Write a Vec corresponding to a nodal field to an exodus file
902: Collective on v
904: Input Parameters:
905: + v - The vector to be written
906: . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
907: . step - the time step to write at (exodus steps are numbered starting from 1)
908: - offset - the location of the variable in the file
910: Notes:
911: The exodus result nodal variable index is obtained by comparing the Vec name and the
912: names of nodal variables declared in the exodus file. For instance for a Vec named "V"
913: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
914: amongst all nodal variables.
916: Level: beginner
918: .seealso: `EXOGetVarIndex_Internal()`, `DMPlexView_ExodusII_Internal()`, `VecLoadNodal_PlexEXO()`, `VecViewZonal_PlexEXO()`, `VecLoadZonal_PlexEXO()`
919: @*/
920: PetscErrorCode VecViewPlex_ExodusII_Nodal_Internal(Vec v, int exoid, int step, int offset)
921: {
922: MPI_Comm comm;
923: PetscMPIInt size;
924: DM dm;
925: Vec vNatural, vComp;
926: const PetscScalar *varray;
927: PetscInt xs, xe, bs;
928: PetscBool useNatural;
930: PetscFunctionBegin;
931: PetscCall(PetscObjectGetComm((PetscObject)v, &comm));
932: PetscCallMPI(MPI_Comm_size(comm, &size));
933: PetscCall(VecGetDM(v, &dm));
934: PetscCall(DMGetUseNatural(dm, &useNatural));
935: useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
936: if (useNatural) {
937: PetscCall(DMPlexCreateNaturalVector(dm, &vNatural));
938: PetscCall(DMPlexGlobalToNaturalBegin(dm, v, vNatural));
939: PetscCall(DMPlexGlobalToNaturalEnd(dm, v, vNatural));
940: } else {
941: vNatural = v;
942: }
944: /* Write local chunk of the result in the exodus file
945: exodus stores each component of a vector-valued field as a separate variable.
946: We assume that they are stored sequentially */
947: PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe));
948: PetscCall(VecGetBlockSize(vNatural, &bs));
949: if (bs == 1) {
950: PetscCall(VecGetArrayRead(vNatural, &varray));
951: PetscCallExternal(ex_put_partial_var, exoid, step, EX_NODAL, offset, 1, xs + 1, xe - xs, varray);
952: PetscCall(VecRestoreArrayRead(vNatural, &varray));
953: } else {
954: IS compIS;
955: PetscInt c;
957: PetscCall(ISCreateStride(comm, (xe - xs) / bs, xs, bs, &compIS));
958: for (c = 0; c < bs; ++c) {
959: PetscCall(ISStrideSetStride(compIS, (xe - xs) / bs, xs + c, bs));
960: PetscCall(VecGetSubVector(vNatural, compIS, &vComp));
961: PetscCall(VecGetArrayRead(vComp, &varray));
962: PetscCallExternal(ex_put_partial_var, exoid, step, EX_NODAL, offset + c, 1, xs / bs + 1, (xe - xs) / bs, varray);
963: PetscCall(VecRestoreArrayRead(vComp, &varray));
964: PetscCall(VecRestoreSubVector(vNatural, compIS, &vComp));
965: }
966: PetscCall(ISDestroy(&compIS));
967: }
968: if (useNatural) PetscCall(VecDestroy(&vNatural));
969: PetscFunctionReturn(PETSC_SUCCESS);
970: }
972: /*
973: VecLoadPlex_ExodusII_Nodal_Internal - Read a Vec corresponding to a nodal field from an exodus file
975: Collective on v
977: Input Parameters:
978: + v - The vector to be read
979: . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
980: . step - the time step to read at (exodus steps are numbered starting from 1)
981: - offset - the location of the variable in the file
983: Notes:
984: The exodus result nodal variable index is obtained by comparing the Vec name and the
985: names of nodal variables declared in the exodus file. For instance for a Vec named "V"
986: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
987: amongst all nodal variables.
989: Level: beginner
991: .seealso: `EXOGetVarIndex_Internal()`, `DMPlexView_ExodusII_Internal()`, `VecViewPlex_ExodusII_Nodal_Internal()`, `VecViewZonal_PlexEXO()`, `VecLoadZonal_PlexEXO()`
992: */
993: PetscErrorCode VecLoadPlex_ExodusII_Nodal_Internal(Vec v, int exoid, int step, int offset)
994: {
995: MPI_Comm comm;
996: PetscMPIInt size;
997: DM dm;
998: Vec vNatural, vComp;
999: PetscScalar *varray;
1000: PetscInt xs, xe, bs;
1001: PetscBool useNatural;
1003: PetscFunctionBegin;
1004: PetscCall(PetscObjectGetComm((PetscObject)v, &comm));
1005: PetscCallMPI(MPI_Comm_size(comm, &size));
1006: PetscCall(VecGetDM(v, &dm));
1007: PetscCall(DMGetUseNatural(dm, &useNatural));
1008: useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
1009: if (useNatural) PetscCall(DMPlexCreateNaturalVector(dm, &vNatural));
1010: else vNatural = v;
1012: /* Read local chunk from the file */
1013: PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe));
1014: PetscCall(VecGetBlockSize(vNatural, &bs));
1015: if (bs == 1) {
1016: PetscCall(VecGetArray(vNatural, &varray));
1017: PetscCallExternal(ex_get_partial_var, exoid, step, EX_NODAL, offset, 1, xs + 1, xe - xs, varray);
1018: PetscCall(VecRestoreArray(vNatural, &varray));
1019: } else {
1020: IS compIS;
1021: PetscInt c;
1023: PetscCall(ISCreateStride(comm, (xe - xs) / bs, xs, bs, &compIS));
1024: for (c = 0; c < bs; ++c) {
1025: PetscCall(ISStrideSetStride(compIS, (xe - xs) / bs, xs + c, bs));
1026: PetscCall(VecGetSubVector(vNatural, compIS, &vComp));
1027: PetscCall(VecGetArray(vComp, &varray));
1028: PetscCallExternal(ex_get_partial_var, exoid, step, EX_NODAL, offset + c, 1, xs / bs + 1, (xe - xs) / bs, varray);
1029: PetscCall(VecRestoreArray(vComp, &varray));
1030: PetscCall(VecRestoreSubVector(vNatural, compIS, &vComp));
1031: }
1032: PetscCall(ISDestroy(&compIS));
1033: }
1034: if (useNatural) {
1035: PetscCall(DMPlexNaturalToGlobalBegin(dm, vNatural, v));
1036: PetscCall(DMPlexNaturalToGlobalEnd(dm, vNatural, v));
1037: PetscCall(VecDestroy(&vNatural));
1038: }
1039: PetscFunctionReturn(PETSC_SUCCESS);
1040: }
1042: /*
1043: VecViewPlex_ExodusII_Zonal_Internal - Write a Vec corresponding to a zonal (cell based) field to an exodus file
1045: Collective on v
1047: Input Parameters:
1048: + v - The vector to be written
1049: . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
1050: . step - the time step to write at (exodus steps are numbered starting from 1)
1051: - offset - the location of the variable in the file
1053: Notes:
1054: The exodus result zonal variable index is obtained by comparing the Vec name and the
1055: names of zonal variables declared in the exodus file. For instance for a Vec named "V"
1056: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
1057: amongst all zonal variables.
1059: Level: beginner
1061: .seealso: `EXOGetVarIndex_Internal()`, `DMPlexView_ExodusII_Internal()`, `VecViewPlex_ExodusII_Nodal_Internal()`, `VecLoadPlex_ExodusII_Nodal_Internal()`, `VecLoadPlex_ExodusII_Zonal_Internal()`
1062: */
1063: PetscErrorCode VecViewPlex_ExodusII_Zonal_Internal(Vec v, int exoid, int step, int offset)
1064: {
1065: MPI_Comm comm;
1066: PetscMPIInt size;
1067: DM dm;
1068: Vec vNatural, vComp;
1069: const PetscScalar *varray;
1070: PetscInt xs, xe, bs;
1071: PetscBool useNatural;
1072: IS compIS;
1073: PetscInt *csSize, *csID;
1074: PetscInt numCS, set, csxs = 0;
1076: PetscFunctionBegin;
1077: PetscCall(PetscObjectGetComm((PetscObject)v, &comm));
1078: PetscCallMPI(MPI_Comm_size(comm, &size));
1079: PetscCall(VecGetDM(v, &dm));
1080: PetscCall(DMGetUseNatural(dm, &useNatural));
1081: useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
1082: if (useNatural) {
1083: PetscCall(DMPlexCreateNaturalVector(dm, &vNatural));
1084: PetscCall(DMPlexGlobalToNaturalBegin(dm, v, vNatural));
1085: PetscCall(DMPlexGlobalToNaturalEnd(dm, v, vNatural));
1086: } else {
1087: vNatural = v;
1088: }
1090: /* Write local chunk of the result in the exodus file
1091: exodus stores each component of a vector-valued field as a separate variable.
1092: We assume that they are stored sequentially
1093: Zonal variables are accessed one element block at a time, so we loop through the cell sets,
1094: but once the vector has been reordered to natural size, we cannot use the label information
1095: to figure out what to save where. */
1096: numCS = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
1097: PetscCall(PetscMalloc2(numCS, &csID, numCS, &csSize));
1098: PetscCallExternal(ex_get_ids, exoid, EX_ELEM_BLOCK, csID);
1099: for (set = 0; set < numCS; ++set) {
1100: ex_block block;
1102: block.id = csID[set];
1103: block.type = EX_ELEM_BLOCK;
1104: PetscCallExternal(ex_get_block_param, exoid, &block);
1105: csSize[set] = block.num_entry;
1106: }
1107: PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe));
1108: PetscCall(VecGetBlockSize(vNatural, &bs));
1109: if (bs > 1) PetscCall(ISCreateStride(comm, (xe - xs) / bs, xs, bs, &compIS));
1110: for (set = 0; set < numCS; set++) {
1111: PetscInt csLocalSize, c;
1113: /* range of indices for set csID[set]: csxs:csxs + csSize[set]-1
1114: local slice of zonal values: xs/bs, xe/bs-1
1115: intersection: max(xs/bs,csxs), min(xe/bs-1,csxs + csSize[set]-1) */
1116: csLocalSize = PetscMax(0, PetscMin(xe / bs, csxs + csSize[set]) - PetscMax(xs / bs, csxs));
1117: if (bs == 1) {
1118: PetscCall(VecGetArrayRead(vNatural, &varray));
1119: PetscCallExternal(ex_put_partial_var, exoid, step, EX_ELEM_BLOCK, offset, csID[set], PetscMax(xs - csxs, 0) + 1, csLocalSize, &varray[PetscMax(0, csxs - xs)]);
1120: PetscCall(VecRestoreArrayRead(vNatural, &varray));
1121: } else {
1122: for (c = 0; c < bs; ++c) {
1123: PetscCall(ISStrideSetStride(compIS, (xe - xs) / bs, xs + c, bs));
1124: PetscCall(VecGetSubVector(vNatural, compIS, &vComp));
1125: PetscCall(VecGetArrayRead(vComp, &varray));
1126: PetscCallExternal(ex_put_partial_var, exoid, step, EX_ELEM_BLOCK, offset + c, csID[set], PetscMax(xs / bs - csxs, 0) + 1, csLocalSize, &varray[PetscMax(0, csxs - xs / bs)]);
1127: PetscCall(VecRestoreArrayRead(vComp, &varray));
1128: PetscCall(VecRestoreSubVector(vNatural, compIS, &vComp));
1129: }
1130: }
1131: csxs += csSize[set];
1132: }
1133: PetscCall(PetscFree2(csID, csSize));
1134: if (bs > 1) PetscCall(ISDestroy(&compIS));
1135: if (useNatural) PetscCall(VecDestroy(&vNatural));
1136: PetscFunctionReturn(PETSC_SUCCESS);
1137: }
1139: /*
1140: VecLoadPlex_ExodusII_Zonal_Internal - Read a Vec corresponding to a zonal (cell based) field from an exodus file
1142: Collective on v
1144: Input Parameters:
1145: + v - The vector to be read
1146: . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
1147: . step - the time step to read at (exodus steps are numbered starting from 1)
1148: - offset - the location of the variable in the file
1150: Notes:
1151: The exodus result zonal variable index is obtained by comparing the Vec name and the
1152: names of zonal variables declared in the exodus file. For instance for a Vec named "V"
1153: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
1154: amongst all zonal variables.
1156: Level: beginner
1158: .seealso: `EXOGetVarIndex_Internal()`, `DMPlexView_ExodusII_Internal()`, `VecViewPlex_ExodusII_Nodal_Internal()`, `VecLoadPlex_ExodusII_Nodal_Internal()`, `VecLoadPlex_ExodusII_Zonal_Internal()`
1159: */
1160: PetscErrorCode VecLoadPlex_ExodusII_Zonal_Internal(Vec v, int exoid, int step, int offset)
1161: {
1162: MPI_Comm comm;
1163: PetscMPIInt size;
1164: DM dm;
1165: Vec vNatural, vComp;
1166: PetscScalar *varray;
1167: PetscInt xs, xe, bs;
1168: PetscBool useNatural;
1169: IS compIS;
1170: PetscInt *csSize, *csID;
1171: PetscInt numCS, set, csxs = 0;
1173: PetscFunctionBegin;
1174: PetscCall(PetscObjectGetComm((PetscObject)v, &comm));
1175: PetscCallMPI(MPI_Comm_size(comm, &size));
1176: PetscCall(VecGetDM(v, &dm));
1177: PetscCall(DMGetUseNatural(dm, &useNatural));
1178: useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
1179: if (useNatural) PetscCall(DMPlexCreateNaturalVector(dm, &vNatural));
1180: else vNatural = v;
1182: /* Read local chunk of the result in the exodus file
1183: exodus stores each component of a vector-valued field as a separate variable.
1184: We assume that they are stored sequentially
1185: Zonal variables are accessed one element block at a time, so we loop through the cell sets,
1186: but once the vector has been reordered to natural size, we cannot use the label information
1187: to figure out what to save where. */
1188: numCS = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
1189: PetscCall(PetscMalloc2(numCS, &csID, numCS, &csSize));
1190: PetscCallExternal(ex_get_ids, exoid, EX_ELEM_BLOCK, csID);
1191: for (set = 0; set < numCS; ++set) {
1192: ex_block block;
1194: block.id = csID[set];
1195: block.type = EX_ELEM_BLOCK;
1196: PetscCallExternal(ex_get_block_param, exoid, &block);
1197: csSize[set] = block.num_entry;
1198: }
1199: PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe));
1200: PetscCall(VecGetBlockSize(vNatural, &bs));
1201: if (bs > 1) PetscCall(ISCreateStride(comm, (xe - xs) / bs, xs, bs, &compIS));
1202: for (set = 0; set < numCS; ++set) {
1203: PetscInt csLocalSize, c;
1205: /* range of indices for set csID[set]: csxs:csxs + csSize[set]-1
1206: local slice of zonal values: xs/bs, xe/bs-1
1207: intersection: max(xs/bs,csxs), min(xe/bs-1,csxs + csSize[set]-1) */
1208: csLocalSize = PetscMax(0, PetscMin(xe / bs, csxs + csSize[set]) - PetscMax(xs / bs, csxs));
1209: if (bs == 1) {
1210: PetscCall(VecGetArray(vNatural, &varray));
1211: PetscCallExternal(ex_get_partial_var, exoid, step, EX_ELEM_BLOCK, offset, csID[set], PetscMax(xs - csxs, 0) + 1, csLocalSize, &varray[PetscMax(0, csxs - xs)]);
1212: PetscCall(VecRestoreArray(vNatural, &varray));
1213: } else {
1214: for (c = 0; c < bs; ++c) {
1215: PetscCall(ISStrideSetStride(compIS, (xe - xs) / bs, xs + c, bs));
1216: PetscCall(VecGetSubVector(vNatural, compIS, &vComp));
1217: PetscCall(VecGetArray(vComp, &varray));
1218: PetscCallExternal(ex_get_partial_var, exoid, step, EX_ELEM_BLOCK, offset + c, csID[set], PetscMax(xs / bs - csxs, 0) + 1, csLocalSize, &varray[PetscMax(0, csxs - xs / bs)]);
1219: PetscCall(VecRestoreArray(vComp, &varray));
1220: PetscCall(VecRestoreSubVector(vNatural, compIS, &vComp));
1221: }
1222: }
1223: csxs += csSize[set];
1224: }
1225: PetscCall(PetscFree2(csID, csSize));
1226: if (bs > 1) PetscCall(ISDestroy(&compIS));
1227: if (useNatural) {
1228: PetscCall(DMPlexNaturalToGlobalBegin(dm, vNatural, v));
1229: PetscCall(DMPlexNaturalToGlobalEnd(dm, vNatural, v));
1230: PetscCall(VecDestroy(&vNatural));
1231: }
1232: PetscFunctionReturn(PETSC_SUCCESS);
1233: }
1234: #endif
1236: /*@
1237: PetscViewerExodusIIGetId - Get the file id of the `PETSCVIEWEREXODUSII` file
1239: Logically Collective on viewer
1241: Input Parameter:
1242: . viewer - the `PetscViewer`
1244: Output Parameter:
1245: . exoid - The ExodusII file id
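Note:
The returned id can be passed directly to ExodusII library calls. A minimal sketch:
.vb
  int      exoid = -1;
  PetscInt numBlocks;
  PetscCall(PetscViewerExodusIIGetId(viewer, &exoid));
  numBlocks = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
.ve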
1247: Level: intermediate
1249: .seealso: `PETSCVIEWEREXODUSII`, `PetscViewer`, `PetscViewerFileSetMode()`, `PetscViewerCreate()`, `PetscViewerSetType()`, `PetscViewerBinaryOpen()`
1250: @*/
1251: PetscErrorCode PetscViewerExodusIIGetId(PetscViewer viewer, int *exoid)
1252: {
1253: PetscFunctionBegin;
1255: PetscTryMethod(viewer, "PetscViewerGetId_C", (PetscViewer, int *), (viewer, exoid));
1256: PetscFunctionReturn(PETSC_SUCCESS);
1257: }
1259: /*@
1260: PetscViewerExodusIISetOrder - Set the order of the elements in the ExodusII file.
1262: Collective
1264: Input Parameters:
1265: + viewer - the `PETSCVIEWEREXODUSII` viewer
1266: - order - the element order
1270: Level: beginner
1272: .seealso: `PETSCVIEWEREXODUSII`, `PetscViewer`, `PetscViewerExodusIIGetId()`, `PetscViewerExodusIIGetOrder()`, `PetscViewerExodusIISetOrder()`
1273: @*/
1274: PetscErrorCode PetscViewerExodusIISetOrder(PetscViewer viewer, PetscInt order)
1275: {
1276: PetscFunctionBegin;
1278: PetscTryMethod(viewer, "PetscViewerSetOrder_C", (PetscViewer, PetscInt), (viewer, order));
1279: PetscFunctionReturn(PETSC_SUCCESS);
1280: }
1282: /*@
1283: PetscViewerExodusIIGetOrder - Get the order of the elements in the ExodusII file.
1285: Collective
1287: Input Parameter:
1288: . viewer - the `PETSCVIEWEREXODUSII` viewer
1291: Output Parameter:
1292: . order - the element order
1293: Level: beginner
1295: .seealso: `PETSCVIEWEREXODUSII`, `PetscViewer`, `PetscViewerExodusIIGetId()`, `PetscViewerExodusIIGetOrder()`, `PetscViewerExodusIISetOrder()`
1296: @*/
1297: PetscErrorCode PetscViewerExodusIIGetOrder(PetscViewer viewer, PetscInt *order)
1298: {
1299: PetscFunctionBegin;
1301: PetscTryMethod(viewer, "PetscViewerGetOrder_C", (PetscViewer, PetscInt *), (viewer, order));
1302: PetscFunctionReturn(PETSC_SUCCESS);
1303: }
1305: /*@C
1306: PetscViewerExodusIIOpen - Opens a file for ExodusII input/output.
1308: Collective
1310: Input Parameters:
1311: + comm - MPI communicator
1312: . name - name of file
1313: - type - type of file
1314: .vb
1315: FILE_MODE_WRITE - create new file for binary output
1316: FILE_MODE_READ - open existing file for binary input
1317: FILE_MODE_APPEND - open existing file for binary output
1318: .ve
1320: Output Parameter:
1321: . exo - `PETSCVIEWEREXODUSII` `PetscViewer` for Exodus II input/output to use with the specified file
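Note:
A minimal sketch, reading back previously saved fields from an illustrative file "mesh.exo":
.vb
  PetscViewer viewer;
  PetscCall(PetscViewerExodusIIOpen(PETSC_COMM_WORLD, "mesh.exo", FILE_MODE_READ, &viewer));
  PetscCall(VecLoad(v, viewer));
  PetscCall(PetscViewerDestroy(&viewer));
.ve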
1323: Level: beginner
1325: .seealso: `PETSCVIEWEREXODUSII`, `PetscViewer`, `PetscViewerPushFormat()`, `PetscViewerDestroy()`,
1326: `DMLoad()`, `PetscFileMode`, `PetscViewer`, `PetscViewerSetType()`, `PetscViewerFileSetMode()`, `PetscViewerFileSetName()`
1327: @*/
1328: PetscErrorCode PetscViewerExodusIIOpen(MPI_Comm comm, const char name[], PetscFileMode type, PetscViewer *exo)
1329: {
1330: PetscFunctionBegin;
1331: PetscCall(PetscViewerCreate(comm, exo));
1332: PetscCall(PetscViewerSetType(*exo, PETSCVIEWEREXODUSII));
1333: PetscCall(PetscViewerFileSetMode(*exo, type));
1334: PetscCall(PetscViewerFileSetName(*exo, name));
1335: PetscCall(PetscViewerSetFromOptions(*exo));
1336: PetscFunctionReturn(PETSC_SUCCESS);
1337: }
1339: /*@C
1340: DMPlexCreateExodusFromFile - Create a `DMPLEX` mesh from an ExodusII file.
1342: Collective
1344: Input Parameters:
1345: + comm - The MPI communicator
1346: . filename - The name of the ExodusII file
1347: - interpolate - Create faces and edges in the mesh
1349: Output Parameter:
1350: . dm - The `DM` object representing the mesh
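Note:
A minimal usage sketch (the file name "mesh.exo" is illustrative):
.vb
  DM dm;
  PetscCall(DMPlexCreateExodusFromFile(PETSC_COMM_WORLD, "mesh.exo", PETSC_TRUE, &dm));
.ve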
1352: Level: beginner
1354: .seealso: [](chapter_unstructured), `DM`, `PETSCVIEWEREXODUSII`, `DMPLEX`, `DMCreate()`, `DMPlexCreateExodus()`
1355: @*/
1356: PetscErrorCode DMPlexCreateExodusFromFile(MPI_Comm comm, const char filename[], PetscBool interpolate, DM *dm)
1357: {
1358: PetscMPIInt rank;
1359: #if defined(PETSC_HAVE_EXODUSII)
1360: int CPU_word_size = sizeof(PetscReal), IO_word_size = 0, exoid = -1;
1361: float version;
1362: #endif
1364: PetscFunctionBegin;
1366: PetscCallMPI(MPI_Comm_rank(comm, &rank));
1367: #if defined(PETSC_HAVE_EXODUSII)
1368: if (rank == 0) {
1369: exoid = ex_open(filename, EX_READ, &CPU_word_size, &IO_word_size, &version);
1370: PetscCheck(exoid > 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open(\"%s\",...) did not return a valid file ID", filename);
1371: }
1372: PetscCall(DMPlexCreateExodus(comm, exoid, interpolate, dm));
1373: if (rank == 0) PetscCallExternal(ex_close, exoid);
1374: PetscFunctionReturn(PETSC_SUCCESS);
1375: #else
1376: SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1377: #endif
1378: }
1380: #if defined(PETSC_HAVE_EXODUSII)
1381: static PetscErrorCode ExodusGetCellType_Internal(const char *elem_type, DMPolytopeType *ct)
1382: {
1383: PetscBool flg;
1385: PetscFunctionBegin;
1386: *ct = DM_POLYTOPE_UNKNOWN;
1387: PetscCall(PetscStrcmp(elem_type, "TRI", &flg));
1388: if (flg) {
1389: *ct = DM_POLYTOPE_TRIANGLE;
1390: goto done;
1391: }
1392: PetscCall(PetscStrcmp(elem_type, "TRI3", &flg));
1393: if (flg) {
1394: *ct = DM_POLYTOPE_TRIANGLE;
1395: goto done;
1396: }
1397: PetscCall(PetscStrcmp(elem_type, "QUAD", &flg));
1398: if (flg) {
1399: *ct = DM_POLYTOPE_QUADRILATERAL;
1400: goto done;
1401: }
1402: PetscCall(PetscStrcmp(elem_type, "QUAD4", &flg));
1403: if (flg) {
1404: *ct = DM_POLYTOPE_QUADRILATERAL;
1405: goto done;
1406: }
1407: PetscCall(PetscStrcmp(elem_type, "SHELL4", &flg));
1408: if (flg) {
1409: *ct = DM_POLYTOPE_QUADRILATERAL;
1410: goto done;
1411: }
1412: PetscCall(PetscStrcmp(elem_type, "TETRA", &flg));
1413: if (flg) {
1414: *ct = DM_POLYTOPE_TETRAHEDRON;
1415: goto done;
1416: }
1417: PetscCall(PetscStrcmp(elem_type, "TET4", &flg));
1418: if (flg) {
1419: *ct = DM_POLYTOPE_TETRAHEDRON;
1420: goto done;
1421: }
1422: PetscCall(PetscStrcmp(elem_type, "WEDGE", &flg));
1423: if (flg) {
1424: *ct = DM_POLYTOPE_TRI_PRISM;
1425: goto done;
1426: }
1427: PetscCall(PetscStrcmp(elem_type, "HEX", &flg));
1428: if (flg) {
1429: *ct = DM_POLYTOPE_HEXAHEDRON;
1430: goto done;
1431: }
1432: PetscCall(PetscStrcmp(elem_type, "HEX8", &flg));
1433: if (flg) {
1434: *ct = DM_POLYTOPE_HEXAHEDRON;
1435: goto done;
1436: }
1437: PetscCall(PetscStrcmp(elem_type, "HEXAHEDRON", &flg));
1438: if (flg) {
1439: *ct = DM_POLYTOPE_HEXAHEDRON;
1440: goto done;
1441: }
1442: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unrecognized element type %s", elem_type);
1443: done:
1444: PetscFunctionReturn(PETSC_SUCCESS);
1445: }
1446: #endif
1448: /*@
1449: DMPlexCreateExodus - Create a `DMPLEX` mesh from an ExodusII file ID.
1451: Collective
1453: Input Parameters:
1454: + comm - The MPI communicator
1455: . exoid - The ExodusII id associated with an exodus file and obtained using ex_open
1456: - interpolate - Create faces and edges in the mesh
1458: Output Parameter:
1459: . dm - The `DM` object representing the mesh
1461: Level: beginner
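Example Usage:
A minimal sketch of the open/create/close sequence, mirroring `DMPlexCreateExodusFromFile()` above; the file name "mesh.exo" is illustrative:
$    int   CPU_word_size = sizeof(PetscReal), IO_word_size = 0, exoid = -1;
$    float version;
$    DM    dm;
$    exoid = ex_open("mesh.exo", EX_READ, &CPU_word_size, &IO_word_size, &version);
$    PetscCall(DMPlexCreateExodus(PETSC_COMM_WORLD, exoid, PETSC_TRUE, &dm));
$    ex_close(exoid);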
1463: .seealso: [](chapter_unstructured), `DM`, `PETSCVIEWEREXODUSII`, `DMPLEX`, `DMCreate()`, `DMPlexCreateExodusFromFile()`
1464: @*/
1465: PetscErrorCode DMPlexCreateExodus(MPI_Comm comm, PetscInt exoid, PetscBool interpolate, DM *dm)
1466: {
1467: #if defined(PETSC_HAVE_EXODUSII)
1468: PetscMPIInt num_proc, rank;
1469: DMLabel cellSets = NULL, faceSets = NULL, vertSets = NULL;
1470: PetscSection coordSection;
1471: Vec coordinates;
1472: PetscScalar *coords;
1473: PetscInt coordSize, v;
1474: /* Read from ex_get_init() */
1475: char title[PETSC_MAX_PATH_LEN + 1];
1476: int dim = 0, dimEmbed = 0, numVertices = 0, numCells = 0;
1477: int num_cs = 0, num_vs = 0, num_fs = 0;
1478: #endif
1480: PetscFunctionBegin;
1481: #if defined(PETSC_HAVE_EXODUSII)
1482: PetscCallMPI(MPI_Comm_rank(comm, &rank));
1483: PetscCallMPI(MPI_Comm_size(comm, &num_proc));
1484: PetscCall(DMCreate(comm, dm));
1485: PetscCall(DMSetType(*dm, DMPLEX));
1486: /* Open the ExodusII file and read basic information on rank 0, then broadcast to all processors */
1487: if (rank == 0) {
1488: PetscCall(PetscMemzero(title, PETSC_MAX_PATH_LEN + 1));
1489: PetscCallExternal(ex_get_init, exoid, title, &dimEmbed, &numVertices, &numCells, &num_cs, &num_vs, &num_fs);
1490: PetscCheck(num_cs, PETSC_COMM_SELF, PETSC_ERR_SUP, "Exodus file does not contain any cell set");
1491: }
1492: PetscCallMPI(MPI_Bcast(title, PETSC_MAX_PATH_LEN + 1, MPI_CHAR, 0, comm));
1493: PetscCallMPI(MPI_Bcast(&dim, 1, MPI_INT, 0, comm));
1494: PetscCall(PetscObjectSetName((PetscObject)*dm, title));
1495: PetscCall(DMPlexSetChart(*dm, 0, numCells + numVertices));
1496: /* We do not want this label computed automatically; instead we compute it here */
1497: PetscCall(DMCreateLabel(*dm, "celltype"));
1499: /* Read cell sets information */
1500: if (rank == 0) {
1501: PetscInt *cone;
1502: int c, cs, ncs, c_loc, v, v_loc;
1503: /* Read from ex_get_elem_blk_ids() */
1504: int *cs_id, *cs_order;
1505: /* Read from ex_get_elem_block() */
1506: char buffer[PETSC_MAX_PATH_LEN + 1];
1507: int num_cell_in_set, num_vertex_per_cell, num_hybrid, num_attr;
1508: /* Read from ex_get_elem_conn() */
1509: int *cs_connect;
1511: /* Get cell sets IDs */
1512: PetscCall(PetscMalloc2(num_cs, &cs_id, num_cs, &cs_order));
1513: PetscCallExternal(ex_get_ids, exoid, EX_ELEM_BLOCK, cs_id);
1514: /* Read the cell set connectivity table and build the mesh topology.
1515: The EXO standard requires that cells in cell sets be numbered sequentially and that cell sets be pairwise disjoint. */
1516: /* Check for a hybrid mesh */
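/* Build a permutation cs_order[] of the cell set indices that lists all non-prism blocks first and the prism (hybrid) blocks last, keeping the relative order within each group, so that hybrid cells are numbered after all other cells. */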
1517: for (cs = 0, num_hybrid = 0; cs < num_cs; ++cs) {
1518: DMPolytopeType ct;
1519: char elem_type[PETSC_MAX_PATH_LEN];
1521: PetscCall(PetscArrayzero(elem_type, sizeof(elem_type)));
1522: PetscCallExternal(ex_get_elem_type, exoid, cs_id[cs], elem_type);
1523: PetscCall(ExodusGetCellType_Internal(elem_type, &ct));
1524: dim = PetscMax(dim, DMPolytopeTypeGetDim(ct));
1525: PetscCallExternal(ex_get_block, exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set, &num_vertex_per_cell, 0, 0, &num_attr);
1526: switch (ct) {
1527: case DM_POLYTOPE_TRI_PRISM:
1528: cs_order[cs] = cs;
1529: ++num_hybrid;
1530: break;
1531: default:
1532: for (c = cs; c > cs - num_hybrid; --c) cs_order[c] = cs_order[c - 1];
1533: cs_order[cs - num_hybrid] = cs;
1534: }
1535: }
1536: /* First set sizes */
1537: for (ncs = 0, c = 0; ncs < num_cs; ++ncs) {
1538: DMPolytopeType ct;
1539: char elem_type[PETSC_MAX_PATH_LEN];
1540: const PetscInt cs = cs_order[ncs];
1542: PetscCall(PetscArrayzero(elem_type, sizeof(elem_type)));
1543: PetscCallExternal(ex_get_elem_type, exoid, cs_id[cs], elem_type);
1544: PetscCall(ExodusGetCellType_Internal(elem_type, &ct));
1545: PetscCallExternal(ex_get_block, exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set, &num_vertex_per_cell, 0, 0, &num_attr);
1546: for (c_loc = 0; c_loc < num_cell_in_set; ++c_loc, ++c) {
1547: PetscCall(DMPlexSetConeSize(*dm, c, num_vertex_per_cell));
1548: PetscCall(DMPlexSetCellType(*dm, c, ct));
1549: }
1550: }
1551: for (v = numCells; v < numCells + numVertices; ++v) PetscCall(DMPlexSetCellType(*dm, v, DM_POLYTOPE_POINT));
1552: PetscCall(DMSetUp(*dm));
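/* Then read the connectivity of each cell set and set the cone of every cell */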
1553: for (ncs = 0, c = 0; ncs < num_cs; ++ncs) {
1554: const PetscInt cs = cs_order[ncs];
1555: PetscCallExternal(ex_get_block, exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set, &num_vertex_per_cell, 0, 0, &num_attr);
1556: PetscCall(PetscMalloc2(num_vertex_per_cell * num_cell_in_set, &cs_connect, num_vertex_per_cell, &cone));
1557: PetscCallExternal(ex_get_conn, exoid, EX_ELEM_BLOCK, cs_id[cs], cs_connect, NULL, NULL);
1558: /* EXO uses Fortran-based (1-based) indexing; DMPlex uses C-style (0-based) indexing and numbers cells first, then vertices. */
1559: for (c_loc = 0, v = 0; c_loc < num_cell_in_set; ++c_loc, ++c) {
1560: DMPolytopeType ct;
1562: for (v_loc = 0; v_loc < num_vertex_per_cell; ++v_loc, ++v) cone[v_loc] = cs_connect[v] + numCells - 1;
1563: PetscCall(DMPlexGetCellType(*dm, c, &ct));
1564: PetscCall(DMPlexInvertCell(ct, cone));
1565: PetscCall(DMPlexSetCone(*dm, c, cone));
1566: PetscCall(DMSetLabelValue_Fast(*dm, &cellSets, "Cell Sets", c, cs_id[cs]));
1567: }
1568: PetscCall(PetscFree2(cs_connect, cone));
1569: }
1570: PetscCall(PetscFree2(cs_id, cs_order));
1571: }
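/* Make the topological dimension (deduced from the cell types on rank 0) and the embedding dimension read from the file consistent across all ranks */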
1572: {
1573: PetscInt ints[] = {dim, dimEmbed};
1575: PetscCallMPI(MPI_Bcast(ints, 2, MPIU_INT, 0, comm));
1576: PetscCall(DMSetDimension(*dm, ints[0]));
1577: PetscCall(DMSetCoordinateDim(*dm, ints[1]));
1578: dim = ints[0];
1579: dimEmbed = ints[1];
1580: }
1581: PetscCall(DMPlexSymmetrize(*dm));
1582: PetscCall(DMPlexStratify(*dm));
1583: if (interpolate) {
1584: DM idm;
1586: PetscCall(DMPlexInterpolate(*dm, &idm));
1587: PetscCall(DMDestroy(dm));
1588: *dm = idm;
1589: }
1591: /* Create vertex set label */
1592: if (rank == 0 && (num_vs > 0)) {
1593: int vs, v;
1594: /* Read from ex_get_node_set_ids() */
1595: int *vs_id;
1596: /* Read from ex_get_node_set_param() */
1597: int num_vertex_in_set;
1598: /* Read from ex_get_node_set() */
1599: int *vs_vertex_list;
1601: /* Get vertex set ids */
1602: PetscCall(PetscMalloc1(num_vs, &vs_id));
1603: PetscCallExternal(ex_get_ids, exoid, EX_NODE_SET, vs_id);
1604: for (vs = 0; vs < num_vs; ++vs) {
1605: PetscCallExternal(ex_get_set_param, exoid, EX_NODE_SET, vs_id[vs], &num_vertex_in_set, NULL);
1606: PetscCall(PetscMalloc1(num_vertex_in_set, &vs_vertex_list));
1607: PetscCallExternal(ex_get_set, exoid, EX_NODE_SET, vs_id[vs], vs_vertex_list, NULL);
1608: for (v = 0; v < num_vertex_in_set; ++v) PetscCall(DMSetLabelValue_Fast(*dm, &vertSets, "Vertex Sets", vs_vertex_list[v] + numCells - 1, vs_id[vs]));
1609: PetscCall(PetscFree(vs_vertex_list));
1610: }
1611: PetscCall(PetscFree(vs_id));
1612: }
1613: /* Read coordinates */
1614: PetscCall(DMGetCoordinateSection(*dm, &coordSection));
1615: PetscCall(PetscSectionSetNumFields(coordSection, 1));
1616: PetscCall(PetscSectionSetFieldComponents(coordSection, 0, dimEmbed));
1617: PetscCall(PetscSectionSetChart(coordSection, numCells, numCells + numVertices));
1618: for (v = numCells; v < numCells + numVertices; ++v) {
1619: PetscCall(PetscSectionSetDof(coordSection, v, dimEmbed));
1620: PetscCall(PetscSectionSetFieldDof(coordSection, v, 0, dimEmbed));
1621: }
1622: PetscCall(PetscSectionSetUp(coordSection));
1623: PetscCall(PetscSectionGetStorageSize(coordSection, &coordSize));
1624: PetscCall(VecCreate(PETSC_COMM_SELF, &coordinates));
1625: PetscCall(PetscObjectSetName((PetscObject)coordinates, "coordinates"));
1626: PetscCall(VecSetSizes(coordinates, coordSize, PETSC_DETERMINE));
1627: PetscCall(VecSetBlockSize(coordinates, dimEmbed));
1628: PetscCall(VecSetType(coordinates, VECSTANDARD));
1629: PetscCall(VecGetArray(coordinates, &coords));
1630: if (rank == 0) {
1631: PetscReal *x, *y, *z;
1633: PetscCall(PetscMalloc3(numVertices, &x, numVertices, &y, numVertices, &z));
1634: PetscCallExternal(ex_get_coord, exoid, x, y, z);
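/* ex_get_coord() fills separate x, y, and z arrays; interleave them into the blocked coordinate vector */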
1635: if (dimEmbed > 0) {
1636: for (v = 0; v < numVertices; ++v) coords[v * dimEmbed + 0] = x[v];
1637: }
1638: if (dimEmbed > 1) {
1639: for (v = 0; v < numVertices; ++v) coords[v * dimEmbed + 1] = y[v];
1640: }
1641: if (dimEmbed > 2) {
1642: for (v = 0; v < numVertices; ++v) coords[v * dimEmbed + 2] = z[v];
1643: }
1644: PetscCall(PetscFree3(x, y, z));
1645: }
1646: PetscCall(VecRestoreArray(coordinates, &coords));
1647: PetscCall(DMSetCoordinatesLocal(*dm, coordinates));
1648: PetscCall(VecDestroy(&coordinates));
1650: /* Create side set label */
1651: if (rank == 0 && interpolate && (num_fs > 0)) {
1652: int fs, f, voff;
1653: /* Read from ex_get_side_set_ids() */
1654: int *fs_id;
1655: /* Read from ex_get_side_set_param() */
1656: int num_side_in_set;
1657: /* Read from ex_get_side_set_node_list() */
1658: int *fs_vertex_count_list, *fs_vertex_list;
1659: /* Read side set labels */
1660: char fs_name[MAX_STR_LENGTH + 1];
1661: size_t fs_name_len;
1663: /* Get side set ids */
1664: PetscCall(PetscMalloc1(num_fs, &fs_id));
1665: PetscCallExternal(ex_get_ids, exoid, EX_SIDE_SET, fs_id);
1666: for (fs = 0; fs < num_fs; ++fs) {
1667: PetscCallExternal(ex_get_set_param, exoid, EX_SIDE_SET, fs_id[fs], &num_side_in_set, NULL);
1668: PetscCall(PetscMalloc2(num_side_in_set, &fs_vertex_count_list, num_side_in_set * 4, &fs_vertex_list));
1669: PetscCallExternal(ex_get_side_set_node_list, exoid, fs_id[fs], fs_vertex_count_list, fs_vertex_list);
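/* fs_vertex_count_list[f] is the number of vertices of side f; fs_vertex_list concatenates the vertex numbers of all sides. Sides with more than 4 vertices are rejected below. */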
1670: /* Get the specific name associated with this side set ID. */
1671: int fs_name_err = ex_get_name(exoid, EX_SIDE_SET, fs_id[fs], fs_name);
1672: if (!fs_name_err) {
1673: PetscCall(PetscStrlen(fs_name, &fs_name_len));
1674: if (fs_name_len == 0) PetscCall(PetscStrncpy(fs_name, "Face Sets", MAX_STR_LENGTH));
1675: }
1676: for (f = 0, voff = 0; f < num_side_in_set; ++f) {
1677: const PetscInt *faces = NULL;
1678: PetscInt faceSize = fs_vertex_count_list[f], numFaces;
1679: PetscInt faceVertices[4], v;
1681: PetscCheck(faceSize <= 4, comm, PETSC_ERR_ARG_WRONG, "ExodusII side cannot have %" PetscInt_FMT " > 4 vertices", faceSize);
1682: for (v = 0; v < faceSize; ++v, ++voff) faceVertices[v] = fs_vertex_list[voff] + numCells - 1;
1683: PetscCall(DMPlexGetFullJoin(*dm, faceSize, faceVertices, &numFaces, &faces));
1684: PetscCheck(numFaces == 1, comm, PETSC_ERR_ARG_WRONG, "Invalid ExodusII side %d in set %d maps to %" PetscInt_FMT " faces", f, fs, numFaces);
1685: PetscCall(DMSetLabelValue_Fast(*dm, &faceSets, "Face Sets", faces[0], fs_id[fs]));
1686: /* Only add the label if one has been detected for this side set. */
1687: if (!fs_name_err) PetscCall(DMSetLabelValue(*dm, fs_name, faces[0], fs_id[fs]));
1688: PetscCall(DMPlexRestoreJoin(*dm, faceSize, faceVertices, &numFaces, &faces));
1689: }
1690: PetscCall(PetscFree2(fs_vertex_count_list, fs_vertex_list));
1691: }
1692: PetscCall(PetscFree(fs_id));
1693: }
1695: { /* Create Cell/Face/Vertex Sets labels at all processes */
1696: enum {
1697: n = 3
1698: };
1699: PetscBool flag[n];
1701: flag[0] = cellSets ? PETSC_TRUE : PETSC_FALSE;
1702: flag[1] = faceSets ? PETSC_TRUE : PETSC_FALSE;
1703: flag[2] = vertSets ? PETSC_TRUE : PETSC_FALSE;
1704: PetscCallMPI(MPI_Bcast(flag, n, MPIU_BOOL, 0, comm));
1705: if (flag[0]) PetscCall(DMCreateLabel(*dm, "Cell Sets"));
1706: if (flag[1]) PetscCall(DMCreateLabel(*dm, "Face Sets"));
1707: if (flag[2]) PetscCall(DMCreateLabel(*dm, "Vertex Sets"));
1708: }
1709: PetscFunctionReturn(PETSC_SUCCESS);
1710: #else
1711: SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1712: #endif
1713: }