Actual source code: vecio.c

  1: /*$Id: vecio.c,v 1.71 2001/03/23 23:21:18 balay Exp $*/

  3: /* 
  4:    This file contains simple binary input routines for vectors.  The
  5:    analogous output routines are within each vector implementation's 
  6:    VecView (with viewer type PETSC_VIEWER_BINARY)
  7:  */

  9:  #include "petsc.h"
 10:  #include "petscsys.h"
 11:  #include "petscvec.h"

 13: /*@C 
 14:   VecLoad - Loads a vector that has been stored in binary format
 15:   with VecView().

 17:   Collective on PetscViewer 

 19:   Input Parameter:
 20: . viewer - binary file viewer, obtained from PetscViewerBinaryOpen()

 22:   Output Parameter:
 23: . newvec - the newly loaded vector

 25:    Level: intermediate

 27:   Notes:
 28:   The input file must contain the full global vector, as
 29:   written by the routine VecView(); a usage sketch follows the routine below.

 31:   Notes for advanced users:
 32:   Most users should not need to know the details of the binary storage
 33:   format, since VecLoad() and VecView() completely hide these details.
 34:   But for anyone who's interested, the standard binary vector storage
 35:   format is
 36: .vb
 37:      int    VEC_COOKIE
 38:      int    number of rows
 39:      Scalar *values of all entries
 40: .ve

 42:    Note for Cray users: the ints stored in the binary file are 32-bit
 43: integers, not 64-bit as they are represented in memory, so if you
 44: write your own routines to read/write these binary files from the Cray
 45: you need to adjust the integer sizes that you read in; see
 46: PetscBinaryRead() and PetscBinaryWrite() to see how this may be
 47: done.

 49:    In addition, PETSc automatically does the byte swapping for
 50: machines that store the bytes reversed, e.g. DEC Alpha, FreeBSD,
 51: Linux, NT and the Paragon; thus if you write your own binary
 52: read/write routines you have to swap the bytes yourself; see PetscBinaryRead()
 53: and PetscBinaryWrite() to see how this may be done. A sketch of reading this format directly appears at the end of this file.

 55:   Concepts: vector^loading from file

 57: .seealso: PetscViewerBinaryOpen(), VecView(), MatLoad(), VecLoadIntoVector() 
 58: @*/
 59: int VecLoad(PetscViewer viewer,Vec *newvec)
 60: {
 61:   int         i,rows,ierr,type,fd,rank,size,n,*range,tag,bs;
 62:   Vec         vec;
 63:   Scalar      *avec;
 64:   MPI_Comm    comm;
 65:   MPI_Request request;
 66:   MPI_Status  status;
 67:   Map         map;
 68:   PetscTruth  isbinary,flag;

 72:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
 73:   if (!isbinary) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary viewer");
 74:   PetscLogEventBegin(VEC_Load,viewer,0,0,0);
 75:   PetscViewerBinaryGetDescriptor(viewer,&fd);
 76:   PetscObjectGetComm((PetscObject)viewer,&comm);
 77:   MPI_Comm_rank(comm,&rank);
 78:   MPI_Comm_size(comm,&size);

 80:   if (!rank) {
 81:     /* Read vector header. */
 82:     PetscBinaryRead(fd,&type,1,PETSC_INT);
 83:     if (type != VEC_COOKIE) SETERRQ(PETSC_ERR_ARG_WRONG,"Non-vector object");
 84:     PetscBinaryRead(fd,&rows,1,PETSC_INT);
 85:     MPI_Bcast(&rows,1,MPI_INT,0,comm);
 86:     VecCreate(comm,PETSC_DECIDE,rows,&vec);
 87:     PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
 88:     if (flag) {
 89:       VecSetBlockSize(vec,bs);
 90:     }
 91:     VecSetFromOptions(vec);
 92:     VecGetLocalSize(vec,&n);
 93:     VecGetArray(vec,&avec);
 94:     PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
 95:     VecRestoreArray(vec,&avec);

 97:     if (size > 1) {
 98:       /* read in other chunks and send to other processors */
 99:       /* determine maximum chunk owned by another processor */
100:       VecGetMap(vec,&map);
101:       MapGetGlobalRange(map,&range);
102:       n = 1;
103:       for (i=1; i<size; i++) {
104:         n = PetscMax(n,range[i+1] - range[i]);  /* size of the chunk destined for processor i */
105:       }
106:       PetscMalloc(n*sizeof(Scalar),&avec);
107:       PetscObjectGetNewTag((PetscObject)viewer,&tag);
108:       for (i=1; i<size; i++) {
109:         n    = range[i+1] - range[i];
110:         PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
111:         MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
112:         MPI_Wait(&request,&status);
113:       }
114:       PetscFree(avec);
115:     }
116:   } else {
117:     MPI_Bcast(&rows,1,MPI_INT,0,comm);
118:     VecCreate(comm,PETSC_DECIDE,rows,&vec);
119:     VecSetFromOptions(vec);
120:     VecGetLocalSize(vec,&n);
121:     PetscObjectGetNewTag((PetscObject)viewer,&tag);
122:     VecGetArray(vec,&avec);
123:     MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
124:     VecRestoreArray(vec,&avec);
125:   }
126:   *newvec = vec;
127:   VecAssemblyBegin(vec);
128:   VecAssemblyEnd(vec);
129:   PetscLogEventEnd(VEC_Load,viewer,0,0,0);
130:   return(0);
131: }
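
A minimal usage sketch for VecLoad() (not part of vecio.c): the file name
"vector.dat" is illustrative, and the read-only mode flag passed to
PetscViewerBinaryOpen(), written here as PETSC_BINARY_RDONLY, may be spelled
differently in other PETSc versions.

   #include "petscvec.h"

   int main(int argc,char **argv)
   {
     PetscViewer viewer;
     Vec         u;
     int         ierr;

     ierr = PetscInitialize(&argc,&argv,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);

     /* open the binary file written earlier with VecView() and load the vector */
     ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vector.dat",PETSC_BINARY_RDONLY,&viewer);CHKERRQ(ierr);
     ierr = VecLoad(viewer,&u);CHKERRQ(ierr);
     ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);

     /* ... work with the loaded vector ... */

     ierr = VecDestroy(u);CHKERRQ(ierr);
     ierr = PetscFinalize();CHKERRQ(ierr);
     return 0;
   }

The parallel layout of the loaded vector is chosen by VecSetFromOptions()
inside VecLoad(), so the same file can be read on any number of processes.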

133: int VecLoadIntoVector_Default(PetscViewer viewer,Vec vec)
134: {
135:   int         i,rows,ierr,type,fd,rank,size,n,*range,tag,bs;
136:   Scalar      *avec;
137:   MPI_Comm    comm;
138:   MPI_Request request;
139:   MPI_Status  status;
140:   Map         map;
141:   PetscTruth  isbinary,flag;


145:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
146:   if (!isbinary) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary viewer");
147:   PetscLogEventBegin(VEC_Load,viewer,vec,0,0);

149:   PetscViewerBinaryGetDescriptor(viewer,&fd);
150:   PetscObjectGetComm((PetscObject)viewer,&comm);
151:   MPI_Comm_rank(comm,&rank);
152:   MPI_Comm_size(comm,&size);

154:   if (!rank) {
155:     /* Read vector header. */
156:     PetscBinaryRead(fd,&type,1,PETSC_INT);
157:     if (type != VEC_COOKIE) SETERRQ(PETSC_ERR_ARG_WRONG,"Non-vector object");
158:     PetscBinaryRead(fd,&rows,1,PETSC_INT);
159:     VecGetSize(vec,&n);
160:     if (n != rows) SETERRQ(1,"Vector in file different length than input vector");
161:     MPI_Bcast(&rows,1,MPI_INT,0,comm);

163:     PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
164:     if (flag) {
165:       VecSetBlockSize(vec,bs);
166:     }
167:     VecSetFromOptions(vec);
168:     VecGetLocalSize(vec,&n);
169:     VecGetArray(vec,&avec);
170:     PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
171:     VecRestoreArray(vec,&avec);

173:     if (size > 1) {
174:       /* read in other chunks and send to other processors */
175:       /* determine maximum chunk owned by another processor */
176:       VecGetMap(vec,&map);
177:       MapGetGlobalRange(map,&range);
178:       n = 1;
179:       for (i=1; i<size; i++) {
180:         n = PetscMax(n,range[i+1] - range[i]);  /* size of the chunk destined for processor i */
181:       }
182:       PetscMalloc(n*sizeof(Scalar),&avec);
183:       PetscObjectGetNewTag((PetscObject)viewer,&tag);
184:       for (i=1; i<size; i++) {
185:         n    = range[i+1] - range[i];
186:         PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
187:         MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
188:         MPI_Wait(&request,&status);
189:       }
190:       PetscFree(avec);
191:     }
192:   } else {
193:     MPI_Bcast(&rows,1,MPI_INT,0,comm);
194:     VecSetFromOptions(vec);
195:     VecGetLocalSize(vec,&n);
196:     PetscObjectGetNewTag((PetscObject)viewer,&tag);
197:     VecGetArray(vec,&avec);
198:     MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
199:     VecRestoreArray(vec,&avec);
200:   }
201:   VecAssemblyBegin(vec);
202:   VecAssemblyEnd(vec);
203:   PetscLogEventEnd(VEC_Load,viewer,vec,0,0);
204:   return(0);
205: }
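
For completeness, a rough sketch of reading the raw file layout described in
the VecLoad() notes above (an int cookie, an int row count, then all Scalar
entries) without going through VecLoad(). It relies only on routines already
used in this file; the helper name ReadRawVectorFile() and the read-mode flag
are illustrative, not part of PETSc. Byte swapping on little-endian machines
is handled inside PetscBinaryRead().

   #include "petscvec.h"

   int ReadRawVectorFile(const char name[],int *rows,Scalar **values)
   {
     PetscViewer viewer;
     int         ierr,fd,cookie;

     ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,name,PETSC_BINARY_RDONLY,&viewer);CHKERRQ(ierr);
     ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);

     /* header: object cookie followed by the global number of rows */
     ierr = PetscBinaryRead(fd,&cookie,1,PETSC_INT);CHKERRQ(ierr);
     if (cookie != VEC_COOKIE) SETERRQ(PETSC_ERR_ARG_WRONG,"File does not start with a vector");
     ierr = PetscBinaryRead(fd,rows,1,PETSC_INT);CHKERRQ(ierr);

     /* body: every entry of the vector, stored contiguously */
     ierr = PetscMalloc((*rows)*sizeof(Scalar),values);CHKERRQ(ierr);
     ierr = PetscBinaryRead(fd,*values,*rows,PETSC_SCALAR);CHKERRQ(ierr);

     ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
     return 0;
   }

The caller is responsible for releasing the returned array with PetscFree()
once it is no longer needed.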