Actual source code: vecio.c

  1: #define PETSCVEC_DLL
  2: /* 
  3:    This file contains simple binary input routines for vectors.  The
  4:    analogous output routines are within each vector implementation's 
  5:    VecView (with viewer types PETSC_VIEWER_BINARY)
  6:  */

 8:  #include "petsc.h"
 9:  #include "petscsys.h"
 10:  #include "petscvec.h"
 11:  #include "private/vecimpl.h"
 12: #if defined(PETSC_HAVE_PNETCDF)
 14: #include "pnetcdf.h"
 16: #endif
 17: EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
 18: EXTERN PetscErrorCode VecLoad_Netcdf(PetscViewer, Vec*);
 19: EXTERN PetscErrorCode VecLoadIntoVector_Binary(PetscViewer, Vec);
 20: EXTERN PetscErrorCode VecLoadIntoVector_Netcdf(PetscViewer, Vec);

 24: /*@C 
 25:   VecLoad - Loads a vector that has been stored in binary format
 26:   with VecView().

 28:   Collective on PetscViewer 

 30:   Input Parameters:
 31: + viewer - binary file viewer obtained from PetscViewerBinaryOpen(), or
 32:            NetCDF file viewer obtained from PetscViewerNetcdfOpen()
 33: - outtype - the type of vector: VECSEQ, VECMPI, or PETSC_NULL (which indicates
 34:             that VECSEQ is used if the communicator in the viewer has size 1
 35:             and VECMPI otherwise)

 37:   Output Parameter:
 38: . newvec - the newly loaded vector

 40:    Level: intermediate

 42:   Notes:
 43:   The input file must contain the full global vector, as
 44:   written by the routine VecView().

 46:   Notes for advanced users:
 47:   Most users should not need to know the details of the binary storage
 48:   format, since VecLoad() and VecView() completely hide these details.
 49:   But for anyone who's interested, the standard binary vector storage
 50:   format is
 51: .vb
 52:      int    VEC_FILE_COOKIE
 53:      int    number of rows
 54:      PetscScalar *values of all entries
 55: .ve
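
   A typical way to read such a file back in is shown below (a minimal sketch:
   the file name "vector.dat" is only a placeholder and all error checking is
   omitted)
.vb
     PetscViewer viewer;
     Vec         u;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vector.dat",FILE_MODE_READ,&viewer);
     VecLoad(viewer,PETSC_NULL,&u);
     PetscViewerDestroy(viewer);
.ve
   followed eventually by VecDestroy(u) when the vector is no longer needed.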

 57:    Note for Cray users: the ints stored in the binary file are 32-bit
 58: integers, not 64-bit as they are represented in memory, so if you
 59: write your own routines to read/write these binary files from the Cray
 60: you need to adjust the integer sizes that you read in; see
 61: PetscBinaryRead() and PetscBinaryWrite() for how this may be
 62: done.

 64:    In addition, PETSc automatically does the byte swapping for
 65: machines that store the bytes reversed, e.g. DEC Alpha, FreeBSD,
 66: Linux, Windows and the Paragon; thus if you write your own binary
 67: read/write routines you have to swap the bytes yourself; again see
 68: PetscBinaryRead() and PetscBinaryWrite() for how this may be done.
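
   As a sketch of what such a hand-written (sequential) reader might look like,
   using PetscBinaryOpen() and PetscBinaryRead() (the file name is again just a
   placeholder and error checking is omitted):
.vb
     int         fd;
     PetscInt    cookie,rows;
     PetscScalar *values;
     PetscBinaryOpen("vector.dat",FILE_MODE_READ,&fd);
     PetscBinaryRead(fd,&cookie,1,PETSC_INT);     /* should equal VEC_FILE_COOKIE */
     PetscBinaryRead(fd,&rows,1,PETSC_INT);       /* global length of the vector  */
     PetscMalloc(rows*sizeof(PetscScalar),&values);
     PetscBinaryRead(fd,values,rows,PETSC_SCALAR);
     PetscBinaryClose(fd);
.ve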

 70:   Concepts: vector^loading from file

 72: .seealso: PetscViewerBinaryOpen(), VecView(), MatLoad(), VecLoadIntoVector() 
 73: @*/
 74: PetscErrorCode  VecLoad(PetscViewer viewer, VecType outtype,Vec *newvec)
 75: {
 77:   PetscTruth     isbinary,flg;
 78:   char           vtype[256];
 79:   const char    *prefix;
 80: #if defined(PETSC_HAVE_PNETCDF)
 81:   PetscTruth     isnetcdf;
 82: #endif

 87:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
 88: #if defined(PETSC_HAVE_PNETCDF)
 89:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
 90:   if ((!isbinary) && (!isnetcdf)) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary or NetCDF viewer");
 91: #else
 92:   if (!isbinary)  SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary viewer");
 93: #endif

 95: #ifndef PETSC_USE_DYNAMIC_LIBRARIES
 96:   VecInitializePackage(PETSC_NULL);
 97: #endif
 98: #if defined(PETSC_HAVE_PNETCDF)
 99:   if (isnetcdf) {
100:     VecLoad_Netcdf(viewer,newvec);
101:   } else
102: #endif
103:   {
104:     Vec            factory;
105:     MPI_Comm       comm;
106:     PetscErrorCode (*r)(PetscViewer, VecType,Vec*);
107:     PetscMPIInt    size;

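    /* allow the output vector type to be overridden from the options database via -vec_type or -vecload_type */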
109:     PetscObjectGetOptionsPrefix((PetscObject)viewer,(const char**)&prefix);
110:     PetscOptionsGetString(prefix,"-vec_type",vtype,256,&flg);
111:     if (flg) {
112:       outtype = vtype;
113:     }
114:     PetscOptionsGetString(prefix,"-vecload_type",vtype,256,&flg);
115:     if (flg) {
116:       outtype = vtype;
117:     }
118:     PetscObjectGetComm((PetscObject)viewer,&comm);
119:     if (!outtype) {
120:       MPI_Comm_size(comm,&size);
121:       outtype = (size > 1) ? VECMPI : VECSEQ;
122:     }

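    /* create a throwaway vector of the requested type only to look up its load function pointer */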
124:     VecCreate(comm,&factory);
125:     VecSetSizes(factory,1,PETSC_DETERMINE);
126:     VecSetType(factory,outtype);
127:     r = factory->ops->load;
128:     VecDestroy(factory);
129:     if (!r) SETERRQ1(PETSC_ERR_SUP,"VecLoad is not supported for type: %s",outtype);
130:     (*r)(viewer,outtype,newvec);
131:   }
132:   return(0);
133: }

135: #if defined(PETSC_HAVE_PNETCDF)
138: PetscErrorCode VecLoad_Netcdf(PetscViewer viewer,Vec *newvec)
139: {
141:   PetscMPIInt    rank;
142:   PetscInt       N,n,bs;
143:   PetscInt       ncid,start;
144:   Vec            vec;
145:   PetscScalar    *avec;
146:   MPI_Comm       comm;
147:   PetscTruth     flag;
148:   char           name[NC_MAX_NAME];

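  /* each process reads its own contiguous piece of dimension 0 of the NetCDF file in one collective call */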
152:   PetscObjectGetComm((PetscObject)viewer,&comm);
153:   MPI_Comm_rank(comm,&rank);
154:   PetscViewerNetcdfGetID(viewer,&ncid);
155:   ncmpi_inq_dim(ncid,0,name,(MPI_Offset*)&N); /* N gets the global vector size */
156:   VecCreate(comm,&vec);
157:   VecSetSizes(vec,PETSC_DECIDE,N);
158:   if (!rank) {
159:     PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
160:     if (flag) {
161:       VecSetBlockSize(vec,bs);
162:     }
163:   }
164:   VecSetFromOptions(vec);
165:   VecGetLocalSize(vec,&n);
166:   VecGetOwnershipRange(vec,&start,PETSC_NULL);
167:   VecGetArray(vec,&avec);
168:   ncmpi_get_vara_double_all(ncid,0,(const MPI_Offset*)&start,(const MPI_Offset*)&n,(double *)avec);
169:   VecRestoreArray(vec,&avec);
170:   *newvec = vec;
171:   VecAssemblyBegin(vec);
172:   VecAssemblyEnd(vec);
174:   return(0);
175: }
176: #endif

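/*
   VecLoad_Binary - process 0 reads the header and all of the values from the
   binary file, keeps its own portion, and sends each remaining chunk to the
   process that owns it.
*/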
180: PetscErrorCode VecLoad_Binary(PetscViewer viewer, VecType itype,Vec *newvec)
181: {
182:   PetscMPIInt    size,rank,tag;
183:   int            fd;
184:   PetscInt       i,rows,type,n,*range,bs;
185:   PetscErrorCode ierr,nierr;
186:   Vec            vec;
187:   PetscScalar    *avec;
188:   MPI_Comm       comm;
189:   MPI_Request    request;
190:   MPI_Status     status;
191:   PetscTruth     flag;

195:   PetscViewerBinaryGetDescriptor(viewer,&fd);
196:   PetscObjectGetComm((PetscObject)viewer,&comm);
197:   MPI_Comm_rank(comm,&rank);
198:   MPI_Comm_size(comm,&size);

200:   if (!rank) {
201:     /* Read vector header. */
202:     ierr = PetscBinaryRead(fd,&type,1,PETSC_INT);if (ierr) goto handleerror;
203:     if (type != VEC_FILE_COOKIE) {ierr = PETSC_ERR_ARG_WRONG; goto handleerror;}
204:     ierr = PetscBinaryRead(fd,&rows,1,PETSC_INT);if (ierr) goto handleerror;
205:     MPI_Bcast(&rows,1,MPIU_INT,0,comm);
206:     VecCreate(comm,&vec);
207:     VecSetSizes(vec,PETSC_DECIDE,rows);
208:     PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
209:     if (flag) {
210:       VecSetBlockSize(vec,bs);
211:     }
212:     VecSetFromOptions(vec);
213:     VecGetLocalSize(vec,&n);
214:     VecGetArray(vec,&avec);
215:     PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
216:     VecRestoreArray(vec,&avec);

218:     if (size > 1) {
219:       /* read in the remaining chunks and send them to the other processes */
220:       /* determine the maximum chunk size owned by any other process */
221:       range = vec->map.range;
222:       n = 1;
223:       for (i=1; i<size; i++) {
224:         n = PetscMax(n,range[i+1] - range[i]);
225:       }
226:       PetscMalloc(n*sizeof(PetscScalar),&avec);
227:       PetscObjectGetNewTag((PetscObject)viewer,&tag);
228:       for (i=1; i<size; i++) {
229:         n    = range[i+1] - range[i];
230:         PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
231:         MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
232:         MPI_Wait(&request,&status);
233:       }
234:       PetscFree(avec);
235:     }
236:   } else {
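    /* processes other than 0: obtain the global size from the broadcast, then receive the locally owned chunk from process 0 */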
237:     MPI_Bcast(&rows,1,MPIU_INT,0,comm);
238:     /* this is a marker sent to indicate that the file does not have a vector at this location */
239:     if (rows == -1)  {
241:       SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Error loading vector");
242:     }
243:     VecCreate(comm,&vec);
244:     VecSetSizes(vec,PETSC_DECIDE,rows);
245:     VecSetFromOptions(vec);
246:     VecGetLocalSize(vec,&n);
247:     PetscObjectGetNewTag((PetscObject)viewer,&tag);
248:     VecGetArray(vec,&avec);
249:     MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
250:     VecRestoreArray(vec,&avec);
251:   }
252:   *newvec = vec;
253:   VecAssemblyBegin(vec);
254:   VecAssemblyEnd(vec);
256:   return(0);
257:   /* tell the other processors we've had an error; only used on process 0 */
258:   handleerror:
259:     if (PetscExceptionValue(ierr)) {
261:       nierr = -1; MPI_Bcast(&nierr,1,MPIU_INT,0,comm);
262:     }
263: 
264:   return(ierr);
265: }

267: #if defined(PETSC_HAVE_PNETCDF)
270: PetscErrorCode VecLoadIntoVector_Netcdf(PetscViewer viewer,Vec vec)
271: {
273:   PetscMPIInt    rank;
274:   PetscInt       N,rows,n,bs;
275:   PetscInt       ncid,start;
276:   PetscScalar    *avec;
277:   MPI_Comm       comm;
278:   PetscTruth     flag;
279:   char           name[NC_MAX_NAME];

283:   PetscObjectGetComm((PetscObject)viewer,&comm);
284:   MPI_Comm_rank(comm,&rank);
285:   PetscViewerNetcdfGetID(viewer,&ncid);
286:   ncmpi_inq_dim(ncid,0,name,(MPI_Offset*)&N); /* N gets the global vector size */
287:   if (!rank) {
288:     VecGetSize(vec,&rows);
289:     if (N != rows) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Vector in file different length than input vector");
290:     PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
291:     if (flag) {
292:       VecSetBlockSize(vec,bs);
293:     }
294:   }
295:   VecSetFromOptions(vec);
296:   VecGetLocalSize(vec,&n);
297:   VecGetOwnershipRange(vec,&start,PETSC_NULL);
298:   VecGetArray(vec,&avec);
299:   ncmpi_get_vara_double_all(ncid,0,(const MPI_Offset*)&start,(const MPI_Offset*)&n,(double *)avec);
300:   VecRestoreArray(vec,&avec);
301:   VecAssemblyBegin(vec);
302:   VecAssemblyEnd(vec);
304:   return(0);
305: }
306: #endif

308: #if defined(PETSC_HAVE_HDF5)
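/*
   VecLoadIntoVector_HDF5 - each process selects its own hyperslab of the "Vec"
   dataset and reads it, collectively when the MPI-IO transfer mode is available.
*/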
311: PetscErrorCode VecLoadIntoVector_HDF5(PetscViewer viewer, Vec xin)
312: {
313:   int            rank = 1; /* Could have rank 2 for blocked vectors */
314:   PetscInt       n, N, bs, low;
315:   PetscScalar   *x;
316:   PetscTruth     flag;
317:   hid_t          file_id, dset_id, filespace, memspace, plist_id;
318:   hsize_t        dims[1];
319:   hsize_t        count[1];
320:   hsize_t        offset[1];
321:   herr_t         status;

326:   PetscOptionsGetInt(PETSC_NULL, "-vecload_block_size", &bs, &flag);
327:   if (flag) {
328:     VecSetBlockSize(xin, bs);
329:   }
330:   VecSetFromOptions(xin);

332:   PetscViewerHDF5GetFileId(viewer, &file_id);

334:   /* Open the dataset */
335:   dset_id = H5Dopen(file_id, "Vec");

337:   /* Retrieve the dataspace for the dataset */
338:   VecGetSize(xin, &N);
339:   filespace = H5Dget_space(dset_id);
340:   H5Sget_simple_extent_dims(filespace, dims, PETSC_NULL);
341:   if (N != (int) dims[0]) SETERRQ(PETSC_ERR_FILE_UNEXPECTED, "Vector in file different length than input vector");

343:   /* Each process defines a memory dataspace for its local portion and reads it from the matching hyperslab in the file */
344:   VecGetLocalSize(xin, &n);
345:   count[0] = n;
346:   memspace = H5Screate_simple(rank, count, NULL);

348:   /* Select hyperslab in the file */
349:   VecGetOwnershipRange(xin, &low, PETSC_NULL);
350:   offset[0] = low;
351:   status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);

353:   /* Create property list for collective dataset read */
354:   plist_id = H5Pcreate(H5P_DATASET_XFER);
355: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
356:   status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
357:   /* To read the dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */
358: #endif

360:   VecGetArray(xin, &x);
361:   status = H5Dread(dset_id, H5T_NATIVE_DOUBLE, memspace, filespace, plist_id, x);CHKERRQ(status);
362:   VecRestoreArray(xin, &x);

364:   /* Close/release resources */
365:   status = H5Pclose(plist_id);CHKERRQ(status);
366:   status = H5Sclose(filespace);CHKERRQ(status);
367:   status = H5Sclose(memspace);CHKERRQ(status);
368:   status = H5Dclose(dset_id);CHKERRQ(status);

370:   VecAssemblyBegin(xin);
371:   VecAssemblyEnd(xin);
373:   return(0);
374: }
375: #endif

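/*
   VecLoadIntoVector_Binary - same communication pattern as VecLoad_Binary(),
   except that the values are read into an existing vector whose global size
   must match the length recorded in the file.
*/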
379: PetscErrorCode VecLoadIntoVector_Binary(PetscViewer viewer,Vec vec)
380: {
382:   PetscMPIInt    size,rank,tag;
383:   PetscInt       i,rows,type,n,*range;
384:   int            fd;
385:   PetscScalar    *avec;
386:   MPI_Comm       comm;
387:   MPI_Request    request;
388:   MPI_Status     status;


393:   PetscViewerBinaryGetDescriptor(viewer,&fd);
394:   PetscObjectGetComm((PetscObject)viewer,&comm);
395:   MPI_Comm_rank(comm,&rank);
396:   MPI_Comm_size(comm,&size);

398:   if (!rank) {
399:     /* Read vector header. */
400:     PetscBinaryRead(fd,&type,1,PETSC_INT);
401:     if (type != VEC_FILE_COOKIE) SETERRQ(PETSC_ERR_ARG_WRONG,"Non-vector object");
402:     PetscBinaryRead(fd,&rows,1,PETSC_INT);
403:     VecGetSize(vec,&n);
404:     if (n != rows) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Vector in file different length than input vector");
405:     MPI_Bcast(&rows,1,MPIU_INT,0,comm);

407:     VecSetFromOptions(vec);
408:     VecGetLocalSize(vec,&n);
409:     VecGetArray(vec,&avec);
410:     PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
411:     VecRestoreArray(vec,&avec);

413:     if (size > 1) {
414:       /* read in the remaining chunks and send them to the other processes */
415:       /* determine the maximum chunk size owned by any other process */
416:       range = vec->map.range;
417:       n = 1;
418:       for (i=1; i<size; i++) {
419:         n = PetscMax(n,range[i+1] - range[i]);
420:       }
421:       PetscMalloc(n*sizeof(PetscScalar),&avec);
422:       PetscObjectGetNewTag((PetscObject)viewer,&tag);
423:       for (i=1; i<size; i++) {
424:         n    = range[i+1] - range[i];
425:         PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
426:         MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
427:         MPI_Wait(&request,&status);
428:       }
429:       PetscFree(avec);
430:     }
431:   } else {
432:     MPI_Bcast(&rows,1,MPIU_INT,0,comm);
433:     VecSetFromOptions(vec);
434:     VecGetLocalSize(vec,&n);
435:     PetscObjectGetNewTag((PetscObject)viewer,&tag);
436:     VecGetArray(vec,&avec);
437:     MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
438:     VecRestoreArray(vec,&avec);
439:   }
440:   VecAssemblyBegin(vec);
441:   VecAssemblyEnd(vec);
443:   return(0);
444: }

448: PetscErrorCode VecLoadIntoVector_Default(PetscViewer viewer,Vec vec)
449: {
450:   PetscTruth     isbinary;
451: #if defined(PETSC_HAVE_PNETCDF)
452:   PetscTruth     isnetcdf;
453: #endif
454: #if defined(PETSC_HAVE_HDF5)
455:   PetscTruth     ishdf5;
456: #endif

460:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
461: #if defined(PETSC_HAVE_PNETCDF)
462:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
463: #endif
464: #if defined(PETSC_HAVE_HDF5)
465:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF5,&ishdf5);
466: #endif

468:   if (isbinary) {
469:     VecLoadIntoVector_Binary(viewer,vec);
470: #if defined(PETSC_HAVE_PNETCDF)
471:   } else if (isnetcdf) {
472:     VecLoadIntoVector_Netcdf(viewer,vec);
473: #endif
474: #if defined(PETSC_HAVE_HDF5)
475:   } else if (ishdf5) {
476:     VecLoadIntoVector_HDF5(viewer,vec);
477: #endif
478:   } else {
479:     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name);
480:   }
481:   return(0);
482: }