Actual source code: pdvec.c

  1: /*
  2:      Code for some of the parallel vector primitives.
  3: */
  4: #include "src/vec/impls/mpi/pvecimpl.h"
  5: #if defined(PETSC_HAVE_PNETCDF)
  7: #include "pnetcdf.h"
  9: #endif

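/*
   VecDestroy_MPI - Releases the storage of a parallel (MPI) vector: the local
   array (if the vector allocated it), any local ghosted representation and its
   update scatter, both stashes, and finally the Vec_MPI data structure itself.
*/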
 13: PetscErrorCode VecDestroy_MPI(Vec v)
 14: {
 15:   Vec_MPI        *x = (Vec_MPI*)v->data;

 19: #if defined(PETSC_USE_LOG)
 20:   PetscLogObjectState((PetscObject)v,"Length=%D",v->N);
 21: #endif  
 22:   if (x->array_allocated) {PetscFree(x->array_allocated);}

 24:   /* Destroy local representation of vector if it exists */
 25:   if (x->localrep) {
 26:     VecDestroy(x->localrep);
 27:     if (x->localupdate) {VecScatterDestroy(x->localupdate);}
 28:   }
 29:   /* Destroy the stashes: note the order, so that the tags are freed properly */
 30:   VecStashDestroy_Private(&v->bstash);
 31:   VecStashDestroy_Private(&v->stash);
 32:   PetscFree(x);
 33:   return(0);
 34: }

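/*
   VecView_MPI_ASCII - Prints a parallel vector through an ASCII viewer.
   Process 0 prints its own entries and then receives and prints the entries of
   processes 1..size-1 in rank order; every other process simply sends its local
   array to process 0.  The output layout depends on the viewer format
   (PETSC_VIEWER_ASCII_MATLAB, PETSC_VIEWER_ASCII_SYMMODU, or the default,
   optionally with indices or per-process headers).
*/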
 38: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
 39: {
 40:   PetscErrorCode    ierr;
 41:   PetscInt          i,work = xin->n,cnt,len;
 42:   PetscMPIInt       j,n,size,rank,tag = ((PetscObject)viewer)->tag;
 43:   MPI_Status        status;
 44:   PetscScalar       *values,*xarray;
 45:   char              *name;
 46:   PetscViewerFormat format;

 49:   VecGetArray(xin,&xarray);
 50:   /* determine maximum message length to arrive */
 51:   MPI_Comm_rank(xin->comm,&rank);
 52:   MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,xin->comm);
 53:   MPI_Comm_size(xin->comm,&size);

 55:   if (!rank) {
 56:     PetscMalloc((len+1)*sizeof(PetscScalar),&values);
 57:     PetscViewerGetFormat(viewer,&format);
 58:     /*
 59:         Matlab format and ASCII format are very similar except 
 60:         Matlab uses %18.16e format while ASCII uses %g
 61:     */
 62:     if (format == PETSC_VIEWER_ASCII_MATLAB) {
 63:       PetscObjectGetName((PetscObject)xin,&name);
 64:       PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
 65:       for (i=0; i<xin->n; i++) {
 66: #if defined(PETSC_USE_COMPLEX)
 67:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
 68:           PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
 69:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
 70:           PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
 71:         } else {
 72:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(xarray[i]));
 73:         }
 74: #else
 75:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
 76: #endif
 77:       }
 78:       /* receive and print messages */
 79:       for (j=1; j<size; j++) {
 80:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
 81:         MPI_Get_count(&status,MPIU_SCALAR,&n);
 82:         for (i=0; i<n; i++) {
 83: #if defined(PETSC_USE_COMPLEX)
 84:           if (PetscImaginaryPart(values[i]) > 0.0) {
 85:             PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16e i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
 86:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
 87:             PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16e i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
 88:           } else {
 89:             PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(values[i]));
 90:           }
 91: #else
 92:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
 93: #endif
 94:         }
 95:       }
 96:       PetscViewerASCIIPrintf(viewer,"];\n");

 98:     } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
 99:       for (i=0; i<xin->n; i++) {
100: #if defined(PETSC_USE_COMPLEX)
101:         PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
102: #else
103:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
104: #endif
105:       }
106:       /* receive and print messages */
107:       for (j=1; j<size; j++) {
108:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
109:         MPI_Get_count(&status,MPIU_SCALAR,&n);
110:         for (i=0; i<n; i++) {
111: #if defined(PETSC_USE_COMPLEX)
112:           PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
113: #else
114:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
115: #endif
116:         }
117:       }

119:     } else {
120:       if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
121:       cnt = 0;
122:       for (i=0; i<xin->n; i++) {
123:         if (format == PETSC_VIEWER_ASCII_INDEX) {
124:           PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
125:         }
126: #if defined(PETSC_USE_COMPLEX)
127:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
128:           PetscViewerASCIIPrintf(viewer,"%g + %g i\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
129:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
130:           PetscViewerASCIIPrintf(viewer,"%g - %g i\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
131:         } else {
132:           PetscViewerASCIIPrintf(viewer,"%g\n",PetscRealPart(xarray[i]));
133:         }
134: #else
135:         PetscViewerASCIIPrintf(viewer,"%g\n",xarray[i]);
136: #endif
137:       }
138:       /* receive and print messages */
139:       for (j=1; j<size; j++) {
140:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
141:         MPI_Get_count(&status,MPIU_SCALAR,&n);
142:         if (format != PETSC_VIEWER_ASCII_COMMON) {
143:           PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
144:         }
145:         for (i=0; i<n; i++) {
146:           if (format == PETSC_VIEWER_ASCII_INDEX) {
147:             PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
148:           }
149: #if defined(PETSC_USE_COMPLEX)
150:           if (PetscImaginaryPart(values[i]) > 0.0) {
151:             PetscViewerASCIIPrintf(viewer,"%g + %g i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
152:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
153:             PetscViewerASCIIPrintf(viewer,"%g - %g i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
154:           } else {
155:             PetscViewerASCIIPrintf(viewer,"%g\n",PetscRealPart(values[i]));
156:           }
157: #else
158:           PetscViewerASCIIPrintf(viewer,"%g\n",values[i]);
159: #endif
160:         }
161:       }
162:     }
163:     PetscFree(values);
164:   } else {
165:     /* send values */
166:     MPI_Send(xarray,xin->n,MPIU_SCALAR,0,tag,xin->comm);
167:   }
168:   PetscViewerFlush(viewer);
169:   VecRestoreArray(xin,&xarray);
170:   return(0);
171: }

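/*
   VecView_MPI_Binary - Writes a parallel vector to a binary viewer.  Process 0
   writes the class cookie, the global length, and its own values, then receives
   and writes the values of the remaining processes in rank order.  If the vector
   has a block size greater than one, a -vecload_block_size hint is appended to
   the viewer's info file.
*/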
175: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
176: {
178:   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag,n;
179:   PetscInt       len,work = xin->n,j;
180:   int            fdes;
181:   MPI_Status     status;
182:   PetscScalar    *values,*xarray;
183:   FILE           *file;

186:   VecGetArray(xin,&xarray);
187:   PetscViewerBinaryGetDescriptor(viewer,&fdes);

189:   /* determine maximum message length to arrive */
190:   MPI_Comm_rank(xin->comm,&rank);
191:   MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,xin->comm);
192:   MPI_Comm_size(xin->comm,&size);

194:   if (!rank) {
195:     PetscInt cookie = VEC_FILE_COOKIE;
196:     PetscBinaryWrite(fdes,&cookie,1,PETSC_INT,PETSC_FALSE);
197:     PetscBinaryWrite(fdes,&xin->N,1,PETSC_INT,PETSC_FALSE);
198:     PetscBinaryWrite(fdes,xarray,xin->n,PETSC_SCALAR,PETSC_FALSE);

200:     PetscMalloc((len+1)*sizeof(PetscScalar),&values);
201:     /* receive and write messages */
202:     for (j=1; j<size; j++) {
203:       MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
204:       MPI_Get_count(&status,MPIU_SCALAR,&n);
205:       PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_FALSE);
206:     }
207:     PetscFree(values);
208:     PetscViewerBinaryGetInfoPointer(viewer,&file);
209:     if (file && xin->bs > 1) {
210:       if (xin->prefix) {
211:         PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",xin->prefix,xin->bs);
212:       } else {
213:         PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",xin->bs);
214:       }
215:     }
216:   } else {
217:     /* send values */
218:     MPI_Send(xarray,xin->n,MPIU_SCALAR,0,tag,xin->comm);
219:   }
220:   VecRestoreArray(xin,&xarray);
221:   return(0);
222: }

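/*
   VecView_MPI_Draw_LG - Draws a parallel vector as a line graph.  The entries
   (their real parts in the complex case) are gathered onto process 0 with
   MPI_Gatherv and plotted against the global index.  Not supported when PETSc
   is built with 64-bit indices.
*/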
226: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
227: {
228:   PetscDraw      draw;
229:   PetscTruth     isnull;

232: #if defined(PETSC_USE_64BIT_INT)
234:   PetscViewerDrawGetDraw(viewer,0,&draw);
235:   PetscDrawIsNull(draw,&isnull);
236:   if (isnull) return(0);
237:   SETERRQ(PETSC_ERR_SUP,"Not supported with 64 bit integers");
238: #else
239:   PetscMPIInt    size,rank;
240:   int            i,N = xin->N,*lens;
241:   PetscReal      *xx,*yy;
242:   PetscDrawLG    lg;
243:   PetscScalar    *xarray;

246:   PetscViewerDrawGetDraw(viewer,0,&draw);
247:   PetscDrawIsNull(draw,&isnull);
248:   if (isnull) return(0);

250:   VecGetArray(xin,&xarray);
251:   PetscViewerDrawGetDrawLG(viewer,0,&lg);
252:   PetscDrawCheckResizedWindow(draw);
253:   MPI_Comm_rank(xin->comm,&rank);
254:   MPI_Comm_size(xin->comm,&size);
255:   if (!rank) {
256:     PetscDrawLGReset(lg);
257:     PetscMalloc(2*(N+1)*sizeof(PetscReal),&xx);
258:     for (i=0; i<N; i++) {xx[i] = (PetscReal) i;}
259:     yy   = xx + N;
260:     PetscMalloc(size*sizeof(int),&lens);
261:     for (i=0; i<size; i++) {
262:       lens[i] = xin->map->range[i+1] - xin->map->range[i];
263:     }
264: #if !defined(PETSC_USE_COMPLEX)
265:     MPI_Gatherv(xarray,xin->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,xin->comm);
266: #else
267:     {
268:       PetscReal *xr;
269:       PetscMalloc((xin->n+1)*sizeof(PetscReal),&xr);
270:       for (i=0; i<xin->n; i++) {
271:         xr[i] = PetscRealPart(xarray[i]);
272:       }
273:       MPI_Gatherv(xr,xin->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,xin->comm);
274:       PetscFree(xr);
275:     }
276: #endif
277:     PetscFree(lens);
278:     PetscDrawLGAddPoints(lg,N,&xx,&yy);
279:     PetscFree(xx);
280:   } else {
281: #if !defined(PETSC_USE_COMPLEX)
282:     MPI_Gatherv(xarray,xin->n,MPIU_REAL,0,0,0,MPIU_REAL,0,xin->comm);
283: #else
284:     {
285:       PetscReal *xr;
286:       PetscMalloc((xin->n+1)*sizeof(PetscReal),&xr);
287:       for (i=0; i<xin->n; i++) {
288:         xr[i] = PetscRealPart(xarray[i]);
289:       }
290:       MPI_Gatherv(xr,xin->n,MPIU_REAL,0,0,0,MPIU_REAL,0,xin->comm);
291:       PetscFree(xr);
292:     }
293: #endif
294:   }
295:   PetscDrawLGDraw(lg);
296:   PetscDrawSynchronizedFlush(draw);
297:   VecRestoreArray(xin,&xarray);
298: #endif
299:   return(0);
300: }

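/*
   VecView_MPI_Draw - Draws a parallel vector with each process drawing the line
   segments for its own portion.  The global y-axis limits are computed with
   MPI_Reduce on process 0, the coordinate system is broadcast to all processes,
   and each process passes its last local entry to its right neighbor so the
   pieces join up.
*/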
305: PetscErrorCode VecView_MPI_Draw(Vec xin,PetscViewer viewer)
306: {
308:   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
309:   PetscInt       i,start,end;
310:   MPI_Status     status;
311:   PetscReal      coors[4],ymin,ymax,xmin,xmax,tmp;
312:   PetscDraw      draw;
313:   PetscTruth     isnull;
314:   PetscDrawAxis  axis;
315:   PetscScalar    *xarray;

318:   PetscViewerDrawGetDraw(viewer,0,&draw);
319:   PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

321:   VecGetArray(xin,&xarray);
322:   PetscDrawCheckResizedWindow(draw);
323:   xmin = 1.e20; xmax = -1.e20;
324:   for (i=0; i<xin->n; i++) {
325: #if defined(PETSC_USE_COMPLEX)
326:     if (PetscRealPart(xarray[i]) < xmin) xmin = PetscRealPart(xarray[i]);
327:     if (PetscRealPart(xarray[i]) > xmax) xmax = PetscRealPart(xarray[i]);
328: #else
329:     if (xarray[i] < xmin) xmin = xarray[i];
330:     if (xarray[i] > xmax) xmax = xarray[i];
331: #endif
332:   }
333:   if (xmin + 1.e-10 > xmax) {
334:     xmin -= 1.e-5;
335:     xmax += 1.e-5;
336:   }
337:   MPI_Reduce(&xmin,&ymin,1,MPIU_REAL,MPI_MIN,0,xin->comm);
338:   MPI_Reduce(&xmax,&ymax,1,MPIU_REAL,MPI_MAX,0,xin->comm);
339:   MPI_Comm_size(xin->comm,&size);
340:   MPI_Comm_rank(xin->comm,&rank);
341:   PetscDrawAxisCreate(draw,&axis);
342:   PetscLogObjectParent(draw,axis);
343:   if (!rank) {
344:     PetscDrawClear(draw);
345:     PetscDrawFlush(draw);
346:     PetscDrawAxisSetLimits(axis,0.0,(double)xin->N,ymin,ymax);
347:     PetscDrawAxisDraw(axis);
348:     PetscDrawGetCoordinates(draw,coors,coors+1,coors+2,coors+3);
349:   }
350:   PetscDrawAxisDestroy(axis);
351:   MPI_Bcast(coors,4,MPIU_REAL,0,xin->comm);
352:   if (rank) {PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);}
353:   /* draw local part of vector */
354:   VecGetOwnershipRange(xin,&start,&end);
355:   if (rank < size-1) { /* send last local value to the right neighbor */
356:     MPI_Send(&xarray[xin->n-1],1,MPIU_REAL,rank+1,tag,xin->comm);
357:   }
358:   for (i=1; i<xin->n; i++) {
359: #if !defined(PETSC_USE_COMPLEX)
360:     PetscDrawLine(draw,(PetscReal)(i-1+start),xarray[i-1],(PetscReal)(i+start),
361:                    xarray[i],PETSC_DRAW_RED);
362: #else
363:     PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),
364:                    PetscRealPart(xarray[i]),PETSC_DRAW_RED);
365: #endif
366:   }
367:   if (rank) { /* receive boundary value from the left neighbor */
368:     MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,xin->comm,&status);
369: #if !defined(PETSC_USE_COMPLEX)
370:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,xarray[0],PETSC_DRAW_RED);
371: #else
372:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
373: #endif
374:   }
375:   PetscDrawSynchronizedFlush(draw);
376:   PetscDrawPause(draw);
377:   VecRestoreArray(xin,&xarray);
378:   return(0);
379: }

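/*
   VecView_MPI_Socket - Gathers the entire vector onto process 0 with
   MPI_Gatherv and sends it through the socket viewer.  Not supported when
   PETSc is built with 64-bit indices.
*/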
384: PetscErrorCode VecView_MPI_Socket(Vec xin,PetscViewer viewer)
385: {
386: #if defined(PETSC_USE_64BIT_INT)
388:   SETERRQ(PETSC_ERR_SUP,"Not supported with 64 bit integers");
389: #else
391:   PetscMPIInt    rank,size;
392:   int            i,N = xin->N,*lens;
393:   PetscScalar    *xx,*xarray;

396:   VecGetArray(xin,&xarray);
397:   MPI_Comm_rank(xin->comm,&rank);
398:   MPI_Comm_size(xin->comm,&size);
399:   if (!rank) {
400:     PetscMalloc((N+1)*sizeof(PetscScalar),&xx);
401:     PetscMalloc(size*sizeof(int),&lens);
402:     for (i=0; i<size; i++) {
403:       lens[i] = xin->map->range[i+1] - xin->map->range[i];
404:     }
405:     MPI_Gatherv(xarray,xin->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,xin->comm);
406:     PetscFree(lens);
407:     PetscViewerSocketPutScalar(viewer,N,1,xx);
408:     PetscFree(xx);
409:   } else {
410:     MPI_Gatherv(xarray,xin->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,xin->comm);
411:   }
412:   VecRestoreArray(xin,&xarray);
413: #endif
414:   return(0);
415: }

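/*
   VecView_MPI_Matlab - Gathers the entire vector onto process 0 and hands it to
   the Matlab viewer under the vector's name.  Requires PETSc to be built with
   Matlab, real double-precision scalars, and 32-bit indices.
*/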
419: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
420: {
421: #if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_64BIT_INT)
423:   PetscMPIInt    rank,size,*lens;
424:   PetscInt       i,N = xin->N;
425:   PetscScalar    *xx,*xarray;

428:   VecGetArray(xin,&xarray);
429:   MPI_Comm_rank(xin->comm,&rank);
430:   MPI_Comm_size(xin->comm,&size);
431:   if (!rank) {
432:     PetscMalloc((N+1)*sizeof(PetscScalar),&xx);
433:     PetscMalloc(size*sizeof(PetscMPIInt),&lens);
434:     for (i=0; i<size; i++) {
435:       lens[i] = xin->map->range[i+1] - xin->map->range[i];
436:     }
437:     MPI_Gatherv(xarray,xin->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,xin->comm);
438:     PetscFree(lens);

440:     PetscObjectName((PetscObject)xin);
441:     PetscViewerMatlabPutArray(viewer,N,1,xx,xin->name);

443:     PetscFree(xx);
444:   } else {
445:     MPI_Gatherv(xarray,xin->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,xin->comm);
446:   }
447:   VecRestoreArray(xin,&xarray);
448:   return(0);
449: #else
451:   SETERRQ(PETSC_ERR_SUP_SYS,"Build PETSc with Matlab to use this viewer");
452: #endif
453: }

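/*
   VecView_MPI_Netcdf - Writes the vector to a NetCDF dataset through the
   parallel NetCDF (pnetcdf) interface; each process writes its own ownership
   range.  Requires PETSc to be built with pnetcdf and is not supported for
   complex scalars.
*/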
457: PetscErrorCode VecView_MPI_Netcdf(Vec xin,PetscViewer v)
458: {
459: #if defined(PETSC_HAVE_PNETCDF)
461:   int         n = xin->n,ncid,xdim,xdim_num=1,xin_id,xstart;
462:   MPI_Comm    comm = xin->comm;
463:   PetscScalar *xarray;

466: #if !defined(PETSC_USE_COMPLEX)
467:   VecGetArray(xin,&xarray);
468:   PetscViewerNetcdfGetID(v,&ncid);
469:   if (ncid < 0) SETERRQ(PETSC_ERR_ORDER,"First call PetscViewerNetcdfOpen to create NetCDF dataset");
470:   /* define dimensions */
471:   ncmpi_def_dim(ncid,"PETSc_Vector_Global_Size",xin->N,&xdim);
472:   /* define variables */
473:   ncmpi_def_var(ncid,"PETSc_Vector_MPI",NC_DOUBLE,xdim_num,&xdim,&xin_id);
474:   /* leave define mode */
475:   ncmpi_enddef(ncid);
476:   /* store the vector */
477:   VecGetOwnershipRange(xin,&xstart,PETSC_NULL);
478:   ncmpi_put_vara_double_all(ncid,xin_id,(const size_t*)&xstart,(const size_t*)&n,xarray);
479:   VecRestoreArray(xin,&xarray);
480: #else 
481:     PetscPrintf(PETSC_COMM_WORLD,"NetCDF viewer not supported for complex numbers\n");
482: #endif
483:   return(0);
484: #else /* !defined(PETSC_HAVE_PNETCDF) */
486:   SETERRQ(PETSC_ERR_SUP_SYS,"Build PETSc with NetCDF to use this viewer");
487: #endif
488: }

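/*
   VecView_MPI_HDF4_Ex - Writes a parallel vector to an HDF4 viewer.  For each
   of the bs interlaced components the local values are converted to float,
   gathered onto process 0, and written as one scientific data set with the
   dimensions supplied by the caller.
*/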
492: PetscErrorCode VecView_MPI_HDF4_Ex(Vec X, PetscViewer viewer, int d, int *dims)
493: {
494: #if defined(PETSC_HAVE_HDF4) && !defined(PETSC_USE_COMPLEX)
496:   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
497:   int            len, i, j, k, cur, bs, n, N;
498:   MPI_Status     status;
499:   PetscScalar    *x;
500:   float          *xlf, *xf;


504:   bs = X->bs > 0 ? X->bs : 1;
505:   N  = X->N / bs;
506:   n  = X->n / bs;

508:   /* For now, always convert to float */
509:   PetscMalloc(N * sizeof(float), &xf);
510:   PetscMalloc(n * sizeof(float), &xlf);

512:   MPI_Comm_rank(X->comm, &rank);
513:   MPI_Comm_size(X->comm, &size);

515:   VecGetArray(X, &x);

517:   for (k = 0; k < bs; k++) {
518:     for (i = 0; i < n; i++) {
519:       xlf[i] = (float) x[i*bs + k];
520:     }
521:     if (!rank) {
522:       cur = 0;
523:       PetscMemcpy(xf + cur, xlf, n * sizeof(float));
524:       cur += n;
525:       for (j = 1; j < size; j++) {
526:         MPI_Recv(xf + cur, N - cur, MPI_FLOAT, j, tag, X->comm,&status);
527:         MPI_Get_count(&status, MPI_FLOAT, &len);cur += len;
528:       }
529:       if (cur != N) {
530:         SETERRQ2(PETSC_ERR_PLIB, "Gathered %D values but expected %D", cur, N);
531:       }
532:       PetscViewerHDF4WriteSDS(viewer, xf, 2, dims, bs);
533:     } else {
534:       MPI_Send(xlf, n, MPI_FLOAT, 0, tag, X->comm);
535:     }
536:   }
537:   VecRestoreArray(X, &x);
538:   PetscFree(xlf);
539:   PetscFree(xf);
540:   return(0);
541: #else /* !defined(PETSC_HAVE_HDF4) */
543:   SETERRQ(PETSC_ERR_SUP_SYS,"Build PETSc with HDF4 to use this viewer");
544: 
545: #endif
546: }

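/*
   VecView_MPI_HDF4 - Convenience wrapper that writes the vector as a
   one-dimensional HDF4 data set of length N/bs.
*/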
550: PetscErrorCode VecView_MPI_HDF4(Vec xin,PetscViewer viewer)
551: {
552: #if defined(PETSC_HAVE_HDF4) && !defined(PETSC_USE_COMPLEX)
554:   int             bs, dims[1];

556:   bs = xin->bs > 0 ? xin->bs : 1;
557:   dims[0] = xin->N / bs;
558:   VecView_MPI_HDF4_Ex(xin, viewer, 1, dims);
559:   return(0);
560: #else /* !defined(PETSC_HAVE_HDF4) */
562:   SETERRQ(PETSC_ERR_SUP_SYS,"Build PETSc with HDF4 to use this viewer");
563: #endif
564: }

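/*
   VecView_MPI - Front end for viewing a parallel vector.  Determines the type
   of the viewer (ASCII, socket, binary, draw, Mathematica, NetCDF, HDF4, or
   Matlab) and dispatches to the corresponding VecView_MPI_* routine; for draw
   viewers the PETSC_VIEWER_DRAW_LG format selects the line-graph form.

   Typical use (illustrative):
      VecView(x,PETSC_VIEWER_STDOUT_WORLD);
*/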
568: PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
569: {
571:   PetscTruth     iascii,issocket,isbinary,isdraw;
572: #if defined(PETSC_HAVE_MATHEMATICA)
573:   PetscTruth     ismathematica;
574: #endif
575: #if defined(PETSC_HAVE_NETCDF)
576:   PetscTruth     isnetcdf;
577: #endif
578: #if defined(PETSC_HAVE_HDF4)
579:   PetscTruth     ishdf4;
580: #endif
581: #if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE)
582:   PetscTruth     ismatlab;
583: #endif

586:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
587:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);
588:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
589:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
590: #if defined(PETSC_HAVE_MATHEMATICA)
591:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATHEMATICA,&ismathematica);
592: #endif
593: #if defined(PETSC_HAVE_NETCDF)
594:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
595: #endif
596: #if defined(PETSC_HAVE_HDF4)
597:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF4,&ishdf4);
598: #endif
599: #if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE)
600:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATLAB,&ismatlab);
601: #endif
602:   if (iascii){
603:     VecView_MPI_ASCII(xin,viewer);
604:   } else if (issocket) {
605:     VecView_MPI_Socket(xin,viewer);
606:   } else if (isbinary) {
607:     VecView_MPI_Binary(xin,viewer);
608:   } else if (isdraw) {
609:     PetscViewerFormat format;

611:     PetscViewerGetFormat(viewer,&format);
612:     if (format == PETSC_VIEWER_DRAW_LG) {
613:       VecView_MPI_Draw_LG(xin,viewer);
614:     } else {
615:       VecView_MPI_Draw(xin,viewer);
616:     }
617: #if defined(PETSC_HAVE_MATHEMATICA)
618:   } else if (ismathematica) {
619:     PetscViewerMathematicaPutVector(viewer,xin);
620: #endif
621: #if defined(PETSC_HAVE_NETCDF)
622:   } else if (isnetcdf) {
623:     VecView_MPI_Netcdf(xin,viewer);
624: #endif
625: #if defined(PETSC_HAVE_HDF4)
626:   } else if (ishdf4) {
627:     VecView_MPI_HDF4(xin,viewer);
628: #endif
629: #if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE)
630:   } else if (ismatlab) {
631:     VecView_MPI_Matlab(xin,viewer);
632: #endif
633:   } else {
634:     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this object",((PetscObject)viewer)->type_name);
635:   }
636:   return(0);
637: }

641: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
642: {
644:   *N = xin->N;
645:   return(0);
646: }

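/*
   VecSetValues_MPI - Inserts or adds values given by global indices.  Entries
   owned by this process are written directly into the local array; off-process
   entries are placed in the stash and communicated to their owners during
   assembly.  Negative indices are ignored.

   Typical calling sequence (illustrative):
      VecSetValues(x,n,indices,values,INSERT_VALUES);
      VecAssemblyBegin(x);
      VecAssemblyEnd(x);
*/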
650: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
651: {
653:   PetscMPIInt    rank = xin->stash.rank;
654:   PetscInt       *owners = xin->map->range,start = owners[rank];
655:   PetscInt       end = owners[rank+1],i,row;
656:   PetscScalar    *xx;

659: #if defined(PETSC_USE_BOPT_g)
660:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
661:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
662:   } else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
663:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
664:   }
665: #endif
666:   VecGetArray(xin,&xx);
667:   xin->stash.insertmode = addv;

669:   if (addv == INSERT_VALUES) {
670:     for (i=0; i<ni; i++) {
671:       if ((row = ix[i]) >= start && row < end) {
672:         xx[row-start] = y[i];
673:       } else if (!xin->stash.donotstash) {
674:         if (ix[i] < 0) continue;
675: #if defined(PETSC_USE_BOPT_g)
676:         if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->N);
677: #endif
678:         VecStashValue_Private(&xin->stash,row,y[i]);
679:       }
680:     }
681:   } else {
682:     for (i=0; i<ni; i++) {
683:       if ((row = ix[i]) >= start && row < end) {
684:         xx[row-start] += y[i];
685:       } else if (!xin->stash.donotstash) {
686:         if (ix[i] < 0) continue;
687: #if defined(PETSC_USE_BOPT_g)
688:         if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->N);
689: #endif        
690:         VecStashValue_Private(&xin->stash,row,y[i]);
691:       }
692:     }
693:   }
694:   VecRestoreArray(xin,&xx);
695:   return(0);
696: }

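/*
   VecSetValuesBlocked_MPI - Like VecSetValues_MPI() but each index is a block
   index and bs consecutive values are inserted or added per index; off-process
   blocks are placed in the block stash for communication during assembly.
*/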
700: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
701: {
702:   PetscMPIInt    rank = xin->stash.rank;
703:   PetscInt       *owners = xin->map->range,start = owners[rank];
705:   PetscInt       end = owners[rank+1],i,row,bs = xin->bs,j;
706:   PetscScalar    *xx,*y = (PetscScalar*)yin;

709:   VecGetArray(xin,&xx);
710: #if defined(PETSC_USE_BOPT_g)
711:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
712:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
713:   }
714:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
715:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
716:   }
717: #endif
718:   xin->stash.insertmode = addv;

720:   if (addv == INSERT_VALUES) {
721:     for (i=0; i<ni; i++) {
722:       if ((row = bs*ix[i]) >= start && row < end) {
723:         for (j=0; j<bs; j++) {
724:           xx[row-start+j] = y[j];
725:         }
726:       } else if (!xin->stash.donotstash) {
727:         if (ix[i] < 0) continue;
728: #if defined(PETSC_USE_BOPT_g)
729:         if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->N);
730: #endif
731:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
732:       }
733:       y += bs;
734:     }
735:   } else {
736:     for (i=0; i<ni; i++) {
737:       if ((row = bs*ix[i]) >= start && row < end) {
738:         for (j=0; j<bs; j++) {
739:           xx[row-start+j] += y[j];
740:         }
741:       } else if (!xin->stash.donotstash) {
742:         if (ix[i] < 0) continue;
743: #if defined(PETSC_USE_BOPT_g)
744:         if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->N);
745: #endif
746:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
747:       }
748:       y += bs;
749:     }
750:   }
751:   VecRestoreArray(xin,&xx);
752:   return(0);
753: }

755: /*
756:    Since nsends or nreceives may be zero, we add 1 in certain mallocs
757:    to make sure we never malloc a zero-length buffer.
758: */
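/*
   VecAssemblyBegin_MPI - Verifies that all processes used a compatible insert
   mode and begins scattering the stashed off-process values (scalar and block
   stashes) to the processes that own them.
*/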
761: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
762: {
764:   PetscInt       *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
765:   PetscMPIInt    size;
766:   InsertMode     addv;
767:   MPI_Comm       comm = xin->comm;

770:   if (xin->stash.donotstash) {
771:     return(0);
772:   }

774:   MPI_Allreduce(&xin->stash.insertmode,&addv,1,MPI_INT,MPI_BOR,comm);
775:   if (addv == (ADD_VALUES|INSERT_VALUES)) {
776:     SETERRQ(PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
777:   }
778:   xin->stash.insertmode = addv; /* in case this processor had no stashed values */
779: 
780:   bs = xin->bs;
781:   MPI_Comm_size(xin->comm,&size);
782:   if (!xin->bstash.bowners && xin->bs != -1) {
783:     PetscMalloc((size+1)*sizeof(PetscInt),&bowners);
784:     for (i=0; i<size+1; i++){ bowners[i] = owners[i]/bs;}
785:     xin->bstash.bowners = bowners;
786:   } else {
787:     bowners = xin->bstash.bowners;
788:   }
789:   VecStashScatterBegin_Private(&xin->stash,owners);
790:   VecStashScatterBegin_Private(&xin->bstash,bowners);
791:   VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
792:   PetscLogInfo(0,"VecAssemblyBegin_MPI:Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
793:   VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
794:   PetscLogInfo(0,"VecAssemblyBegin_MPI:Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);

796:   return(0);
797: }

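/*
   VecAssemblyEnd_MPI - Receives the stashed values sent during
   VecAssemblyBegin_MPI() and inserts or adds them into the local array, first
   from the scalar stash and then from the block stash, then resets the
   insert mode.
*/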
801: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
802: {
804:   PetscInt       base,i,j,*row,flg,bs;
805:   PetscMPIInt    n;
806:   PetscScalar    *val,*vv,*array,*xarray;

809:   if (!vec->stash.donotstash) {
810:     VecGetArray(vec,&xarray);
811:     base = vec->map->range[vec->stash.rank];
812:     bs   = vec->bs;

814:     /* Process the stash */
815:     while (1) {
816:       VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
817:       if (!flg) break;
818:       if (vec->stash.insertmode == ADD_VALUES) {
819:         for (i=0; i<n; i++) { xarray[row[i] - base] += val[i]; }
820:       } else if (vec->stash.insertmode == INSERT_VALUES) {
821:         for (i=0; i<n; i++) { xarray[row[i] - base] = val[i]; }
822:       } else {
823:         SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
824:       }
825:     }
826:     VecStashScatterEnd_Private(&vec->stash);

828:     /* now process the block-stash */
829:     while (1) {
830:       VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
831:       if (!flg) break;
832:       for (i=0; i<n; i++) {
833:         array = xarray+row[i]*bs-base;
834:         vv    = val+i*bs;
835:         if (vec->stash.insertmode == ADD_VALUES) {
836:           for (j=0; j<bs; j++) { array[j] += vv[j];}
837:         } else if (vec->stash.insertmode == INSERT_VALUES) {
838:           for (j=0; j<bs; j++) { array[j] = vv[j]; }
839:         } else {
840:           SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
841:         }
842:       }
843:     }
844:     VecStashScatterEnd_Private(&vec->bstash);
845:     VecRestoreArray(vec,&xarray);
846:   }
847:   vec->stash.insertmode = NOT_SET_VALUES;
848:   return(0);
849: }