Actual source code: grvtk.c
petsc-3.5.2 2014-09-08
#include <petsc-private/dmdaimpl.h>
#include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>
static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
{
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,bs,boffset,maxnnodes,i,j,k,f,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;
  PetscReal                gmin[3],gmax[3];
  PetscErrorCode           ierr;

  PetscObjectGetComm((PetscObject)da,&comm);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
#endif
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  DMDAGetInfo(da,&dim, &mx,&my,&mz, 0,0,0, &bs,0,0,0,0,0);
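  /* bs is the DMDA block size (degrees of freedom per grid point); each of the
     bs components is written below as its own scalar DataArray. */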
  DMDAGetLocalInfo(da,&info);
  DMDAGetBoundingBox(da,gmin,gmax);

  PetscFOpen(comm,vtk->filename,"wb",&fp);
  PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");
#if defined(PETSC_WORDS_BIGENDIAN)
  PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"BigEndian\">\n");
#else
  PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"LittleEndian\">\n");
#endif
  PetscFPrintf(comm,fp,"  <StructuredGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);

  if (!rank) {PetscMalloc1(size*6,&grloc);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);
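  /* After the gather, grloc[r] on rank 0 holds rank r's local corner and extent
     (xs,xm,ys,ym,zs,zm); it drives both the <Piece> headers and the ordered
     receives below.  Only rank 0 allocated grloc, so only it uses the result. */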

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
#if 0
    switch (dim) {
    case 1:
      PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,0,0,0,0);
      break;
    case 2:
      PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,0,0);
      break;
    case 3:
      PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
      break;
    default: SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"No support for dimension %D",dim);
    }
#endif
    PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
    PetscFPrintf(comm,fp,"      <Points>\n");
    PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
    boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
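    /* In VTK's raw appended encoding each DataArray block is preceded by a byte-count
       header (an int, matching the sizeof(int) added here), so every offset advances
       by the payload size plus sizeof(int). */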
    PetscFPrintf(comm,fp,"      </Points>\n");

    PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");
    for (link=vtk->link; link; link=link->next) {
      Vec        X = (Vec)link->vec;
      const char *vecname = "";
      if (((PetscObject)X)->name || link != vtk->link) { /* If the object is already named, use it. If it is past the first link, name it to disambiguate. */
        PetscObjectGetName((PetscObject)X,&vecname);
      }
      for (i=0; i<bs; i++) {
        char       buf[256];
        const char *fieldname;
        DMDAGetFieldName(da,i,&fieldname);
        if (!fieldname) {
          PetscSNPrintf(buf,sizeof(buf),"Unnamed%D",i);
          fieldname = buf;
        }
        PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);
        boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscFPrintf(comm,fp,"      </PointData>\n");
    PetscFPrintf(comm,fp,"    </Piece>\n");
  }
  PetscFPrintf(comm,fp,"  </StructuredGrid>\n");
  PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");
  PetscFPrintf(comm,fp,"_");
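  /* The underscore marks the start of the appended data section; everything from here
     until </AppendedData> is written as raw binary rather than XML text. */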

  /* Now write the arrays. */
  tag = ((PetscObject)viewer)->tag;
  PetscMalloc2(maxnnodes*PetscMax(3,bs),&array,maxnnodes*3,&array2);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }

    { /* Write the coordinates */
      Vec Coords;
      DMGetCoordinates(da,&Coords);
      if (Coords) {
        const PetscScalar *coords;
        VecGetArrayRead(Coords,&coords);
        if (!rank) {
          if (r) {
            PetscMPIInt nn;
            MPI_Recv(array,nnodes*dim,MPIU_SCALAR,r,tag,comm,&status);
            MPI_Get_count(&status,MPIU_SCALAR,&nn);
            if (nn != nnodes*dim) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
          } else {
            PetscMemcpy(array,coords,nnodes*dim*sizeof(PetscScalar));
          }
          /* Transpose coordinates to VTK (C-style) ordering */
          for (k=0; k<zm; k++) {
            for (j=0; j<ym; j++) {
              for (i=0; i<xm; i++) {
                PetscInt Iloc = i+xm*(j+ym*k);
                array2[Iloc*3+0] = array[Iloc*dim + 0];
                array2[Iloc*3+1] = dim > 1 ? array[Iloc*dim + 1] : 0;
                array2[Iloc*3+2] = dim > 2 ? array[Iloc*dim + 2] : 0;
              }
            }
          }
        } else if (r == rank) {
          MPI_Send((void*)coords,nnodes*dim,MPIU_SCALAR,0,tag,comm);
        }
        VecRestoreArrayRead(Coords,&coords);
      } else {       /* Fabricate some coordinates using grid index */
        for (k=0; k<zm; k++) {
          for (j=0; j<ym; j++) {
            for (i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array2[Iloc*3+0] = xs+i;
              array2[Iloc*3+1] = ys+j;
              array2[Iloc*3+2] = zs+k;
            }
          }
        }
      }
      PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,PETSC_SCALAR);
    }

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;

      VecGetArrayRead(X,&x);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);
          MPI_Get_count(&status,MPIU_SCALAR,&nn);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));
        }
        for (f=0; f<bs; f++) {
          /* Extract and transpose the f'th field */
          for (k=0; k<zm; k++) {
            for (j=0; j<ym; j++) {
              for (i=0; i<xm; i++) {
                PetscInt Iloc = i+xm*(j+ym*k);
                array2[Iloc] = array[Iloc*bs + f];
              }
            }
          }
          PetscViewerVTKFWrite(viewer,fp,array2,nnodes,PETSC_SCALAR);
        }
      } else if (r == rank) {
        MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);
      }
      VecRestoreArrayRead(X,&x);
    }
  }
  PetscFree2(array,array2);
  PetscFree(grloc);

  PetscFPrintf(comm,fp,"\n  </AppendedData>\n");
  PetscFPrintf(comm,fp,"</VTKFile>\n");
  PetscFClose(comm,fp);
  return(0);
}
static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
{
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,bs,boffset,maxnnodes,i,j,k,f,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;
  PetscReal                gmin[3],gmax[3];
  PetscErrorCode           ierr;

  PetscObjectGetComm((PetscObject)da,&comm);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
#endif
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  DMDAGetInfo(da,&dim, &mx,&my,&mz, 0,0,0, &bs,0,0,0,0,0);
  DMDAGetLocalInfo(da,&info);
  DMDAGetBoundingBox(da,gmin,gmax);
  PetscFOpen(comm,vtk->filename,"wb",&fp);
  PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");
#if defined(PETSC_WORDS_BIGENDIAN)
  PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"BigEndian\">\n");
#else
  PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"LittleEndian\">\n");
#endif
  PetscFPrintf(comm,fp,"  <RectilinearGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);

  if (!rank) {PetscMalloc1(size*6,&grloc);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
    PetscFPrintf(comm,fp,"      <Coordinates>\n");
    PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Xcoord\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
    boffset += xm*sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Ycoord\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
    boffset += ym*sizeof(PetscScalar) + sizeof(int);
    PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Zcoord\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
    boffset += zm*sizeof(PetscScalar) + sizeof(int);
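    /* A rectilinear grid stores only the three 1-D coordinate arrays, so the coordinate
       payload for this piece is xm+ym+zm scalars rather than 3*nnodes as in the .vts case. */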
    PetscFPrintf(comm,fp,"      </Coordinates>\n");
    PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");
    for (link=vtk->link; link; link=link->next) {
      Vec        X = (Vec)link->vec;
      const char *vecname = "";
      if (((PetscObject)X)->name || link != vtk->link) { /* If the object is already named, use it. If it is past the first link, name it to disambiguate. */
        PetscObjectGetName((PetscObject)X,&vecname);
      }
      for (i=0; i<bs; i++) {
        char       buf[256];
        const char *fieldname;
        DMDAGetFieldName(da,i,&fieldname);
        if (!fieldname) {
          PetscSNPrintf(buf,sizeof(buf),"Unnamed%D",i);
          fieldname = buf;
        }
        PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);
        boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscFPrintf(comm,fp,"      </PointData>\n");
    PetscFPrintf(comm,fp,"    </Piece>\n");
  }
  PetscFPrintf(comm,fp,"  </RectilinearGrid>\n");
  PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");
  PetscFPrintf(comm,fp,"_");

  /* Now write the arrays. */
  tag = ((PetscObject)viewer)->tag;
  PetscMalloc2(PetscMax(maxnnodes*bs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*3,info.xm+info.ym+info.zm),&array2);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }
    { /* Write the coordinates */
      Vec Coords;
      DMGetCoordinates(da,&Coords);
      if (Coords) {
        const PetscScalar *coords;
        VecGetArrayRead(Coords,&coords);
        if (!rank) {
          if (r) {
            PetscMPIInt nn;
            MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status);
            MPI_Get_count(&status,MPIU_SCALAR,&nn);
            if (nn != xm+ym+zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
          } else {
            /* x coordinates */
            for (j=0, k=0, i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[i] = coords[Iloc*dim + 0];
            }
            /* y coordinates */
            for (i=0, k=0, j=0; j<ym; j++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
            }
            /* z coordinates */
            for (i=0, j=0, k=0; k<zm; k++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
            }
          }
        } else if (r == rank) {
          xm = info.xm;
          ym = info.ym;
          zm = info.zm;
          for (j=0, k=0, i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[i] = coords[Iloc*dim + 0];
          }
          for (i=0, k=0, j=0; j<ym; j++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
          }
          for (i=0, j=0, k=0; k<zm; k++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
          }
          MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm);
        }
        VecRestoreArrayRead(Coords,&coords);
      } else {       /* Fabricate some coordinates using grid index */
        for (j=0, k=0, i=0; i<xm; i++) {
          array[i] = xs+i;
        }
        for (i=0, k=0, j=0; j<ym; j++) {
          array[j+xm] = ys+j;
        }
        for (i=0, j=0, k=0; k<zm; k++) {
          array[k+xm+ym] = zs+k;
        }
      }
      if (!rank) {
        PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,PETSC_SCALAR);
        PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,PETSC_SCALAR);
        PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,PETSC_SCALAR);
      }
    }

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;

      VecGetArrayRead(X,&x);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);
          MPI_Get_count(&status,MPIU_SCALAR,&nn);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));
        }
        for (f=0; f<bs; f++) {
          /* Extract and transpose the f'th field */
          for (k=0; k<zm; k++) {
            for (j=0; j<ym; j++) {
              for (i=0; i<xm; i++) {
                PetscInt Iloc = i+xm*(j+ym*k);
                array2[Iloc] = array[Iloc*bs + f];
              }
            }
          }
          PetscViewerVTKFWrite(viewer,fp,array2,nnodes,PETSC_SCALAR);
        }
      } else if (r == rank) {
        MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);
      }
      VecRestoreArrayRead(X,&x);
    }
  }
  PetscFree2(array,array2);
  PetscFree(grloc);

  PetscFPrintf(comm,fp,"\n  </AppendedData>\n");
  PetscFPrintf(comm,fp,"</VTKFile>\n");
  PetscFClose(comm,fp);
  return(0);
}
/*@C
   DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

   Collective

   Input Arguments:
+  odm - DM specifying the grid layout, passed as a PetscObject
-  viewer - viewer of type VTK

   Level: developer

   Note:
   This function is a callback used by the VTK viewer to actually write the file.
   The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
   Instead, metadata for the entire file needs to be available up-front before you can start writing the file.

.seealso: PETSCVIEWERVTK
@*/
PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
{
  DM        dm = (DM)odm;
  PetscBool isvtk;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);
  if (!isvtk) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
  switch (viewer->format) {
  case PETSC_VIEWER_VTK_VTS:
    DMDAVTKWriteAll_VTS(dm,viewer);
    break;
  case PETSC_VIEWER_VTK_VTR:
    DMDAVTKWriteAll_VTR(dm,viewer);
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
  }
  return(0);
}
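
Usage sketch (not part of grvtk.c): a minimal driver showing how fields typically reach DMDAVTKWriteAll. Opening a PETSCVIEWERVTK viewer on a file whose name ends in .vts selects the PETSC_VIEWER_VTK_VTS format, VecView only queues the vector on the viewer, and the queued fields are written by the callback above when the viewer is destroyed. The grid sizes and the file name "solution.vts" below are illustrative only.

#include <petscdmda.h>

int main(int argc,char **argv)
{
  DM             da;
  Vec            x;
  PetscViewer    viewer;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);CHKERRQ(ierr);
  /* 3-D DMDA with one degree of freedom per grid point (sizes are illustrative) */
  ierr = DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,
                      DMDA_STENCIL_BOX,17,17,17,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,
                      1,1,NULL,NULL,NULL,&da);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da,&x);CHKERRQ(ierr);
  ierr = VecSet(x,1.0);CHKERRQ(ierr);
  /* The .vts suffix selects PETSC_VIEWER_VTK_VTS; VecView queues the field and
     DMDAVTKWriteAll writes the file when the viewer is destroyed. */
  ierr = PetscViewerVTKOpen(PETSC_COMM_WORLD,"solution.vts",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
  ierr = VecView(x,viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = DMDestroy(&da);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}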