Actual source code: pdvec.c

  1: #define PETSCVEC_DLL
  2: /*
  3:      Code for some of the parallel vector primitives.
  4: */
  5:  #include "src/vec/vec/impls/mpi/pvecimpl.h"
  6: #if defined(PETSC_HAVE_PNETCDF)
  8: #include "pnetcdf.h"
 10: #endif

 14: PetscErrorCode VecDestroy_MPI(Vec v)
 15: {
 16:   Vec_MPI        *x = (Vec_MPI*)v->data;

 20: #if defined(PETSC_USE_LOG)
 21:   PetscLogObjectState((PetscObject)v,"Length=%D",v->map.N);
 22: #endif  
 23:   PetscFree(x->array_allocated);

 25:   /* Destroy local representation of vector if it exists */
 26:   if (x->localrep) {
 27:     VecDestroy(x->localrep);
 28:     if (x->localupdate) {VecScatterDestroy(x->localupdate);}
 29:   }
 30:   /* Destroy the stashes: note the order - so that the tags are freed properly */
 31:   VecStashDestroy_Private(&v->bstash);
 32:   VecStashDestroy_Private(&v->stash);
 33:   PetscFree(x);
 34:   return(0);
 35: }

 39: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
 40: {
 41:   PetscErrorCode    ierr;
 42:   PetscInt          i,work = xin->map.n,cnt,len,nLen;
 43:   PetscMPIInt       j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
 44:   MPI_Status        status;
 45:   PetscScalar       *values,*xarray;
 46:   const char        *name;
 47:   PetscViewerFormat format;

 50:   VecGetArray(xin,&xarray);
 51:   /* determine maximum message to arrive */
 52:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
 53:   MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,((PetscObject)xin)->comm);
 54:   MPI_Comm_size(((PetscObject)xin)->comm,&size);

 56:   if (!rank) {
 57:     PetscMalloc((len+1)*sizeof(PetscScalar),&values);
 58:     PetscViewerGetFormat(viewer,&format);
 59:     /*
 60:         Matlab format and ASCII format are very similar except 
 61:         Matlab uses %18.16e format while ASCII uses %g
 62:     */
 63:     if (format == PETSC_VIEWER_ASCII_MATLAB) {
 64:       PetscObjectGetName((PetscObject)xin,&name);
 65:       PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
 66:       for (i=0; i<xin->map.n; i++) {
 67: #if defined(PETSC_USE_COMPLEX)
 68:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
 69:           PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
 70:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
 71:           PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
 72:         } else {
 73:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(xarray[i]));
 74:         }
 75: #else
 76:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
 77: #endif
 78:       }
 79:       /* receive and print messages */
 80:       for (j=1; j<size; j++) {
 81:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
 82:         MPI_Get_count(&status,MPIU_SCALAR,&n);
 83:         for (i=0; i<n; i++) {
 84: #if defined(PETSC_USE_COMPLEX)
 85:           if (PetscImaginaryPart(values[i]) > 0.0) {
 86:             PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
 87:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
 88:             PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
 89:           } else {
 90:             PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(values[i]));
 91:           }
 92: #else
 93:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
 94: #endif
 95:         }
 96:       }
 97:       PetscViewerASCIIPrintf(viewer,"];\n");

 99:     } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
100:       for (i=0; i<xin->map.n; i++) {
101: #if defined(PETSC_USE_COMPLEX)
102:         PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
103: #else
104:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
105: #endif
106:       }
107:       /* receive and print messages */
108:       for (j=1; j<size; j++) {
109:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
110:         MPI_Get_count(&status,MPIU_SCALAR,&n);
111:         for (i=0; i<n; i++) {
112: #if defined(PETSC_USE_COMPLEX)
113:           PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
114: #else
115:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
116: #endif
117:         }
118:       }
119:     } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
120:       /*
121:         state 0: No header has been output
122:         state 1: Only POINT_DATA has been output
123:         state 2: Only CELL_DATA has been output
124:         state 3: Output both, POINT_DATA last
125:         state 4: Output both, CELL_DATA last
126:       */
127:       static PetscInt stateId = -1;
128:       int outputState = 0;
129:       PetscTruth hasState;
130:       int doOutput = 0;
131:       PetscInt bs, b;

133:       if (stateId < 0) {
134:         PetscObjectComposedDataRegister(&stateId);
135:       }
136:       PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
137:       if (!hasState) {
138:         outputState = 0;
139:       }
140:       PetscObjectGetName((PetscObject) xin, &name);
141:       VecGetLocalSize(xin, &nLen);
142:       n = (PetscMPIInt) nLen;
143:       VecGetBlockSize(xin, &bs);
144:       if ((bs < 1) || (bs > 3)) {
145:         SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);
146:       }
147:       if (format == PETSC_VIEWER_ASCII_VTK) {
148:         if (outputState == 0) {
149:           outputState = 1;
150:           doOutput = 1;
151:         } else if (outputState == 1) {
152:           doOutput = 0;
153:         } else if (outputState == 2) {
154:           outputState = 3;
155:           doOutput = 1;
156:         } else if (outputState == 3) {
157:           doOutput = 0;
158:         } else if (outputState == 4) {
159:           SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");
160:         }
161:         if (doOutput) {
162:           PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map.N);
163:         }
164:       } else {
165:         if (outputState == 0) {
166:           outputState = 2;
167:           doOutput = 1;
168:         } else if (outputState == 1) {
169:           outputState = 4;
170:           doOutput = 1;
171:         } else if (outputState == 2) {
172:           doOutput = 0;
173:         } else if (outputState == 3) {
174:           SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
175:         } else if (outputState == 4) {
176:           doOutput = 0;
177:         }
178:         if (doOutput) {
179:           PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map.N);
180:         }
181:       }
182:       PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
183:       if (name) {
184:         PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
185:       } else {
186:         PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
187:       }
188:       PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
189:       for (i=0; i<n/bs; i++) {
190:         for (b=0; b<bs; b++) {
191:           if (b > 0) {
192:             PetscViewerASCIIPrintf(viewer," ");
193:           }
194: #if !defined(PETSC_USE_COMPLEX)
195:           PetscViewerASCIIPrintf(viewer,"%G",xarray[i*bs+b]);
196: #endif
197:         }
198:         PetscViewerASCIIPrintf(viewer,"\n");
199:       }
200:       for (j=1; j<size; j++) {
201:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
202:         MPI_Get_count(&status,MPIU_SCALAR,&n);
203:         for (i=0; i<n/bs; i++) {
204:           for (b=0; b<bs; b++) {
205:             if (b > 0) {
206:               PetscViewerASCIIPrintf(viewer," ");
207:             }
208: #if !defined(PETSC_USE_COMPLEX)
209:             PetscViewerASCIIPrintf(viewer,"%G",values[i*bs+b]);
210: #endif
211:           }
212:           PetscViewerASCIIPrintf(viewer,"\n");
213:         }
214:       }
215:     } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
216:       PetscInt bs, b;

218:       VecGetLocalSize(xin, &nLen);
219:       n = (PetscMPIInt) nLen;
220:       VecGetBlockSize(xin, &bs);
221:       if ((bs < 1) || (bs > 3)) {
222:         SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);
223:       }
224:       for (i=0; i<n/bs; i++) {
225:         for (b=0; b<bs; b++) {
226:           if (b > 0) {
227:             PetscViewerASCIIPrintf(viewer," ");
228:           }
229: #if !defined(PETSC_USE_COMPLEX)
230:           PetscViewerASCIIPrintf(viewer,"%G",xarray[i*bs+b]);
231: #endif
232:         }
233:         for (b=bs; b<3; b++) {
234:           PetscViewerASCIIPrintf(viewer," 0.0");
235:         }
236:         PetscViewerASCIIPrintf(viewer,"\n");
237:       }
238:       for (j=1; j<size; j++) {
239:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
240:         MPI_Get_count(&status,MPIU_SCALAR,&n);
241:         for (i=0; i<n/bs; i++) {
242:           for (b=0; b<bs; b++) {
243:             if (b > 0) {
244:               PetscViewerASCIIPrintf(viewer," ");
245:             }
246: #if !defined(PETSC_USE_COMPLEX)
247:             PetscViewerASCIIPrintf(viewer,"%G",values[i*bs+b]);
248: #endif
249:           }
250:           for (b=bs; b<3; b++) {
251:             PetscViewerASCIIPrintf(viewer," 0.0");
252:           }
253:           PetscViewerASCIIPrintf(viewer,"\n");
254:         }
255:       }
256:     } else if (format == PETSC_VIEWER_ASCII_PCICE) {
257:       PetscInt bs, b, vertexCount = 1;

259:       VecGetLocalSize(xin, &nLen);
260:       n = (PetscMPIInt) nLen;
261:       VecGetBlockSize(xin, &bs);
262:       if ((bs < 1) || (bs > 3)) {
263:         SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);
264:       }
265:       PetscViewerASCIIPrintf(viewer,"%D\n", xin->map.N/bs);
266:       for (i=0; i<n/bs; i++) {
267:         PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
268:         for (b=0; b<bs; b++) {
269:           if (b > 0) {
270:             PetscViewerASCIIPrintf(viewer," ");
271:           }
272: #if !defined(PETSC_USE_COMPLEX)
273:           PetscViewerASCIIPrintf(viewer,"% 12.5E",xarray[i*bs+b]);
274: #endif
275:         }
276:         PetscViewerASCIIPrintf(viewer,"\n");
277:       }
278:       for (j=1; j<size; j++) {
279:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
280:         MPI_Get_count(&status,MPIU_SCALAR,&n);
281:         for (i=0; i<n/bs; i++) {
282:           PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
283:           for (b=0; b<bs; b++) {
284:             if (b > 0) {
285:               PetscViewerASCIIPrintf(viewer," ");
286:             }
287: #if !defined(PETSC_USE_COMPLEX)
288:             PetscViewerASCIIPrintf(viewer,"% 12.5E",values[i*bs+b]);
289: #endif
290:           }
291:           PetscViewerASCIIPrintf(viewer,"\n");
292:         }
293:       }
294:     } else if (format == PETSC_VIEWER_ASCII_PYLITH) {
295:       PetscInt bs, b, vertexCount = 1;

297:       VecGetLocalSize(xin, &nLen);
298:       n = (PetscMPIInt) nLen;
299:       VecGetBlockSize(xin, &bs);
300:       if (bs != 3) {
301:         SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "PyLith can only handle 3D objects, but vector dimension is %d", bs);
302:       }
303:       PetscViewerASCIIPrintf(viewer,"#\n");
304:       PetscViewerASCIIPrintf(viewer,"#  Node      X-coord           Y-coord           Z-coord\n");
305:       PetscViewerASCIIPrintf(viewer,"#\n");
306:       for (i=0; i<n/bs; i++) {
307:         PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
308:         for (b=0; b<bs; b++) {
309:           if (b > 0) {
310:             PetscViewerASCIIPrintf(viewer," ");
311:           }
312: #if !defined(PETSC_USE_COMPLEX)
313:           PetscViewerASCIIPrintf(viewer,"% 16.8E",xarray[i*bs+b]);
314: #endif
315:         }
316:         PetscViewerASCIIPrintf(viewer,"\n");
317:       }
318:       for (j=1; j<size; j++) {
319:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
320:         MPI_Get_count(&status,MPIU_SCALAR,&n);
321:         for (i=0; i<n/bs; i++) {
322:           PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
323:           for (b=0; b<bs; b++) {
324:             if (b > 0) {
325:               PetscViewerASCIIPrintf(viewer," ");
326:             }
327: #if !defined(PETSC_USE_COMPLEX)
328:             PetscViewerASCIIPrintf(viewer,"% 16.8E",values[i*bs+b]);
329: #endif
330:           }
331:           PetscViewerASCIIPrintf(viewer,"\n");
332:         }
333:       }
334:     } else {
335:       if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
336:       cnt = 0;
337:       for (i=0; i<xin->map.n; i++) {
338:         if (format == PETSC_VIEWER_ASCII_INDEX) {
339:           PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
340:         }
341: #if defined(PETSC_USE_COMPLEX)
342:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
343:           PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
344:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
345:           PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
346:         } else {
347:           PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(xarray[i]));
348:         }
349: #else
350:         PetscViewerASCIIPrintf(viewer,"%G\n",xarray[i]);
351: #endif
352:       }
353:       /* receive and print messages */
354:       for (j=1; j<size; j++) {
355:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
356:         MPI_Get_count(&status,MPIU_SCALAR,&n);
357:         if (format != PETSC_VIEWER_ASCII_COMMON) {
358:           PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
359:         }
360:         for (i=0; i<n; i++) {
361:           if (format == PETSC_VIEWER_ASCII_INDEX) {
362:             PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
363:           }
364: #if defined(PETSC_USE_COMPLEX)
365:           if (PetscImaginaryPart(values[i]) > 0.0) {
366:             PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
367:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
368:             PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
369:           } else {
370:             PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(values[i]));
371:           }
372: #else
373:           PetscViewerASCIIPrintf(viewer,"%G\n",values[i]);
374: #endif
375:         }
376:       }
377:     }
378:     PetscFree(values);
379:   } else {
380:     /* send values */
381:     MPI_Send(xarray,xin->map.n,MPIU_SCALAR,0,tag,((PetscObject)xin)->comm);
382:   }
383:   PetscViewerFlush(viewer);
384:   VecRestoreArray(xin,&xarray);
385:   return(0);
386: }

390: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
391: {
393:   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag,n;
394:   PetscInt       len,work = xin->map.n,j;
395:   int            fdes;
396:   MPI_Status     status;
397:   PetscScalar    *values,*xarray;
398:   FILE           *file;

401:   VecGetArray(xin,&xarray);
402:   PetscViewerBinaryGetDescriptor(viewer,&fdes);

404:   /* determine maximum message to arrive */
405:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
406:   MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,((PetscObject)xin)->comm);
407:   MPI_Comm_size(((PetscObject)xin)->comm,&size);

409:   if (!rank) {
410:     PetscInt cookie = VEC_FILE_COOKIE;
411:     PetscBinaryWrite(fdes,&cookie,1,PETSC_INT,PETSC_FALSE);
412:     PetscBinaryWrite(fdes,&xin->map.N,1,PETSC_INT,PETSC_FALSE);
413:     PetscBinaryWrite(fdes,xarray,xin->map.n,PETSC_SCALAR,PETSC_FALSE);

415:     PetscMalloc((len+1)*sizeof(PetscScalar),&values);
416:     /* receive and write messages */
417:     for (j=1; j<size; j++) {
418:       MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
419:       MPI_Get_count(&status,MPIU_SCALAR,&n);
420:       PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_FALSE);
421:     }
422:     PetscFree(values);
423:     PetscViewerBinaryGetInfoPointer(viewer,&file);
424:     if (file && xin->map.bs > 1) {
425:       if (((PetscObject)xin)->prefix) {
426:         PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",((PetscObject)xin)->prefix,xin->map.bs);
427:       } else {
428:         PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",xin->map.bs);
429:       }
430:     }
431:   } else {
432:     /* send values */
433:     MPI_Send(xarray,xin->map.n,MPIU_SCALAR,0,tag,((PetscObject)xin)->comm);
434:   }
435:   VecRestoreArray(xin,&xarray);
436:   return(0);
437: }

441: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
442: {
443:   PetscDraw      draw;
444:   PetscTruth     isnull;

447: #if defined(PETSC_USE_64BIT_INDICES)
449:   PetscViewerDrawGetDraw(viewer,0,&draw);
450:   PetscDrawIsNull(draw,&isnull);
451:   if (isnull) return(0);
452:   SETERRQ(PETSC_ERR_SUP,"Not supported with 64 bit integers");
453: #else
454:   PetscMPIInt    size,rank;
455:   int            i,N = xin->map.N,*lens;
456:   PetscReal      *xx,*yy;
457:   PetscDrawLG    lg;
458:   PetscScalar    *xarray;

461:   PetscViewerDrawGetDraw(viewer,0,&draw);
462:   PetscDrawIsNull(draw,&isnull);
463:   if (isnull) return(0);

465:   VecGetArray(xin,&xarray);
466:   PetscViewerDrawGetDrawLG(viewer,0,&lg);
467:   PetscDrawCheckResizedWindow(draw);
468:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
469:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
470:   if (!rank) {
471:     PetscDrawLGReset(lg);
472:     PetscMalloc(2*(N+1)*sizeof(PetscReal),&xx);
473:     for (i=0; i<N; i++) {xx[i] = (PetscReal) i;}
474:     yy   = xx + N;
475:     PetscMalloc(size*sizeof(PetscInt),&lens);
476:     for (i=0; i<size; i++) {
477:       lens[i] = xin->map.range[i+1] - xin->map.range[i];
478:     }
479: #if !defined(PETSC_USE_COMPLEX)
480:     MPI_Gatherv(xarray,xin->map.n,MPIU_REAL,yy,lens,xin->map.range,MPIU_REAL,0,((PetscObject)xin)->comm);
481: #else
482:     {
483:       PetscReal *xr;
484:       PetscMalloc((xin->map.n+1)*sizeof(PetscReal),&xr);
485:       for (i=0; i<xin->map.n; i++) {
486:         xr[i] = PetscRealPart(xarray[i]);
487:       }
488:       MPI_Gatherv(xr,xin->map.n,MPIU_REAL,yy,lens,xin->map.range,MPIU_REAL,0,((PetscObject)xin)->comm);
489:       PetscFree(xr);
490:     }
491: #endif
492:     PetscFree(lens);
493:     PetscDrawLGAddPoints(lg,N,&xx,&yy);
494:     PetscFree(xx);
495:   } else {
496: #if !defined(PETSC_USE_COMPLEX)
497:     MPI_Gatherv(xarray,xin->map.n,MPIU_REAL,0,0,0,MPIU_REAL,0,((PetscObject)xin)->comm);
498: #else
499:     {
500:       PetscReal *xr;
501:       PetscMalloc((xin->map.n+1)*sizeof(PetscReal),&xr);
502:       for (i=0; i<xin->map.n; i++) {
503:         xr[i] = PetscRealPart(xarray[i]);
504:       }
505:       MPI_Gatherv(xr,xin->map.n,MPIU_REAL,0,0,0,MPIU_REAL,0,((PetscObject)xin)->comm);
506:       PetscFree(xr);
507:     }
508: #endif
509:   }
510:   PetscDrawLGDraw(lg);
511:   PetscDrawSynchronizedFlush(draw);
512:   VecRestoreArray(xin,&xarray);
513: #endif
514:   return(0);
515: }

518: /* This is assumed to be extern "C" because it is dynamically loaded.  If not, we can remove the DLLEXPORT tag. */
521: PetscErrorCode  VecView_MPI_Draw(Vec xin,PetscViewer viewer)
522: {
524:   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
525:   PetscInt       i,start,end;
526:   MPI_Status     status;
527:   PetscReal      coors[4],ymin,ymax,xmin,xmax,tmp;
528:   PetscDraw      draw;
529:   PetscTruth     isnull;
530:   PetscDrawAxis  axis;
531:   PetscScalar    *xarray;

534:   PetscViewerDrawGetDraw(viewer,0,&draw);
535:   PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

537:   VecGetArray(xin,&xarray);
538:   PetscDrawCheckResizedWindow(draw);
539:   xmin = 1.e20; xmax = -1.e20;
540:   for (i=0; i<xin->map.n; i++) {
541: #if defined(PETSC_USE_COMPLEX)
542:     if (PetscRealPart(xarray[i]) < xmin) xmin = PetscRealPart(xarray[i]);
543:     if (PetscRealPart(xarray[i]) > xmax) xmax = PetscRealPart(xarray[i]);
544: #else
545:     if (xarray[i] < xmin) xmin = xarray[i];
546:     if (xarray[i] > xmax) xmax = xarray[i];
547: #endif
548:   }
549:   if (xmin + 1.e-10 > xmax) {
550:     xmin -= 1.e-5;
551:     xmax += 1.e-5;
552:   }
553:   MPI_Reduce(&xmin,&ymin,1,MPIU_REAL,MPI_MIN,0,((PetscObject)xin)->comm);
554:   MPI_Reduce(&xmax,&ymax,1,MPIU_REAL,MPI_MAX,0,((PetscObject)xin)->comm);
555:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
556:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
557:   PetscDrawAxisCreate(draw,&axis);
558:   PetscLogObjectParent(draw,axis);
559:   if (!rank) {
560:     PetscDrawClear(draw);
561:     PetscDrawFlush(draw);
562:     PetscDrawAxisSetLimits(axis,0.0,(double)xin->map.N,ymin,ymax);
563:     PetscDrawAxisDraw(axis);
564:     PetscDrawGetCoordinates(draw,coors,coors+1,coors+2,coors+3);
565:   }
566:   PetscDrawAxisDestroy(axis);
567:   MPI_Bcast(coors,4,MPIU_REAL,0,((PetscObject)xin)->comm);
568:   if (rank) {PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);}
569:   /* draw local part of vector */
570:   VecGetOwnershipRange(xin,&start,&end);
571:   if (rank < size-1) { /* send last local value to the right neighbor */
572:     MPI_Send(&xarray[xin->map.n-1],1,MPIU_REAL,rank+1,tag,((PetscObject)xin)->comm);
573:   }
574:   for (i=1; i<xin->map.n; i++) {
575: #if !defined(PETSC_USE_COMPLEX)
576:     PetscDrawLine(draw,(PetscReal)(i-1+start),xarray[i-1],(PetscReal)(i+start),
577:                    xarray[i],PETSC_DRAW_RED);
578: #else
579:     PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),
580:                    PetscRealPart(xarray[i]),PETSC_DRAW_RED);
581: #endif
582:   }
583:   if (rank) { /* receive the left neighbor's last value */
584:     MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,((PetscObject)xin)->comm,&status);
585: #if !defined(PETSC_USE_COMPLEX)
586:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,xarray[0],PETSC_DRAW_RED);
587: #else
588:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
589: #endif
590:   }
591:   PetscDrawSynchronizedFlush(draw);
592:   PetscDrawPause(draw);
593:   VecRestoreArray(xin,&xarray);
594:   return(0);
595: }
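
The two draw routines above are not called directly; they are reached through VecView() with a draw viewer, and VecView_MPI() (below) picks between them based on the viewer format. A minimal usage sketch from the application side (not part of pdvec.c; error checking is omitted to match this listing, and the window title, size, and vector length are arbitrary placeholders):

  Vec         x;
  PetscViewer viewer;

  VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);
  VecSet(x,1.0);
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"Vector",PETSC_DECIDE,PETSC_DECIDE,300,300,&viewer);
  VecView(x,viewer);                                  /* default format -> VecView_MPI_Draw()    */
  PetscViewerSetFormat(viewer,PETSC_VIEWER_DRAW_LG);
  VecView(x,viewer);                                  /* line graph     -> VecView_MPI_Draw_LG() */
  PetscViewerDestroy(viewer);
  VecDestroy(x);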

598: #if defined(PETSC_HAVE_MATLAB_ENGINE)
601: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
602: {
604:   PetscMPIInt    rank,size,*lens;
605:   PetscInt       i,N = xin->map.N;
606:   PetscScalar    *xx,*xarray;

609:   VecGetArray(xin,&xarray);
610:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
611:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
612:   if (!rank) {
613:     PetscMalloc((N+1)*sizeof(PetscScalar),&xx);
614:     PetscMalloc(size*sizeof(PetscMPIInt),&lens);
615:     for (i=0; i<size; i++) {
616:       lens[i] = xin->map.range[i+1] - xin->map.range[i];
617:     }
618:     MPI_Gatherv(xarray,xin->map.n,MPIU_SCALAR,xx,lens,xin->map.range,MPIU_SCALAR,0,((PetscObject)xin)->comm);
619:     PetscFree(lens);

621:     PetscObjectName((PetscObject)xin);
622:     PetscViewerMatlabPutArray(viewer,N,1,xx,xin->name);

624:     PetscFree(xx);
625:   } else {
626:     MPI_Gatherv(xarray,xin->map.n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,((PetscObject)xin)->comm);
627:   }
628:   VecRestoreArray(xin,&xarray);
629:   return(0);
630: }
631: #endif

633: #if defined(PETSC_HAVE_PNETCDF)
636: PetscErrorCode VecView_MPI_Netcdf(Vec xin,PetscViewer v)
637: {
639:   int         n = xin->map.n,ncid,xdim,xdim_num=1,xin_id,xstart;
640:   PetscScalar *xarray;

643:   VecGetArray(xin,&xarray);
644:   PetscViewerNetcdfGetID(v,&ncid);
645:   if (ncid < 0) SETERRQ(PETSC_ERR_ORDER,"First call PetscViewerNetcdfOpen to create NetCDF dataset");
646:   /* define dimensions */
647:   ncmpi_def_dim(ncid,"PETSc_Vector_Global_Size",xin->map.N,&xdim);
648:   /* define variables */
649:   ncmpi_def_var(ncid,"PETSc_Vector_MPI",NC_DOUBLE,xdim_num,&xdim,&xin_id);
650:   /* leave define mode */
651:   ncmpi_enddef(ncid);
652:   /* store the vector */
653:   VecGetOwnershipRange(xin,&xstart,PETSC_NULL);
654:   ncmpi_put_vara_double_all(ncid,xin_id,(const MPI_Offset*)&xstart,(const MPI_Offset*)&n,xarray);
655:   VecRestoreArray(xin,&xarray);
656:   return(0);
657: }
658: #endif

660: #if defined(PETSC_HAVE_HDF4)
663: PetscErrorCode VecView_MPI_HDF4_Ex(Vec X, PetscViewer viewer, int d, int *dims)
664: {
666:   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
667:   int            len, i, j, k, cur, bs, n, N;
668:   MPI_Status     status;
669:   PetscScalar    *x;
670:   float          *xlf, *xf;


674:   bs = X->map.bs > 0 ? X->map.bs : 1;
675:   N  = X->map.N / bs;
676:   n  = X->map.n / bs;

678:   /* For now, always convert to float */
679:   PetscMalloc(N * sizeof(float), &xf);
680:   PetscMalloc(n * sizeof(float), &xlf);

682:   MPI_Comm_rank(((PetscObject)X)->comm, &rank);
683:   MPI_Comm_size(((PetscObject)X)->comm, &size);

685:   VecGetArray(X, &x);

687:   for (k = 0; k < bs; k++) {
688:     for (i = 0; i < n; i++) {
689:       xlf[i] = (float) x[i*bs + k];
690:     }
691:     if (!rank) {
692:       cur = 0;
693:       PetscMemcpy(xf + cur, xlf, n * sizeof(float));
694:       cur += n;
695:       for (j = 1; j < size; j++) {
696:         MPI_Recv(xf + cur, N - cur, MPI_FLOAT, j, tag, ((PetscObject)X)->comm,&status);
697:         MPI_Get_count(&status, MPI_FLOAT, &len);cur += len;
698:       }
699:       if (cur != N) {
700:         SETERRQ2(PETSC_ERR_PLIB, "? %D %D", cur, N);
701:       }
702:       PetscViewerHDF4WriteSDS(viewer, xf, 2, dims, bs);
703:     } else {
704:       MPI_Send(xlf, n, MPI_FLOAT, 0, tag, ((PetscObject)X)->comm);
705:     }
706:   }
707:   VecRestoreArray(X, &x);
708:   PetscFree(xlf);
709:   PetscFree(xf);
710:   return(0);
711: }
712: #endif

714: #if defined(PETSC_HAVE_HDF4)
717: PetscErrorCode VecView_MPI_HDF4(Vec xin,PetscViewer viewer)
718: {
720:   int             bs, dims[1];

722:   bs = xin->map.bs > 0 ? xin->map.bs : 1;
723:   dims[0] = xin->map.N / bs;
724:   VecView_MPI_HDF4_Ex(xin, viewer, 1, dims);
725:   return(0);
726: }
727: #endif

729: #if defined(PETSC_HAVE_HDF5)

733: PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
734: {
735:   hid_t          filespace; /* file dataspace identifier */
736:   hid_t          plist_id;  /* property list identifier */
737:   hid_t          dset_id;   /* dataset identifier */
738:   hid_t          memspace;  /* memory dataspace identifier */
739:   hid_t          file_id;
740:   herr_t         status;
741:   /* PetscInt       bs        = xin->map.bs > 0 ? xin->map.bs : 1; */
742:   int            rank      = 1; /* Could have rank 2 for blocked vectors */
743:   hsize_t        dims[1]   = {xin->map.N};
744:   hsize_t        count[1]  = {xin->map.n};
745:   hsize_t        offset[1];
746:   PetscInt       low;
747:   PetscScalar   *x;

750:   PetscViewerHDF5GetFileId(viewer, &file_id);

752:   /* Create the dataspace for the dataset */
753:   filespace = H5Screate_simple(rank, dims, NULL);

755:   /* Create the dataset with default properties and close filespace */
756:   dset_id = H5Dcreate(file_id, "Vec", H5T_NATIVE_DOUBLE, filespace, H5P_DEFAULT);
757:   status = H5Sclose(filespace);CHKERRQ(status);

759:   /* Each process defines a dataset and writes it to the hyperslab in the file */
760:   memspace = H5Screate_simple(rank, count, NULL);

762:   /* Select hyperslab in the file */
763:   VecGetOwnershipRange(xin, &low, PETSC_NULL);
764:   offset[0] = low;
765:   filespace = H5Dget_space(dset_id);
766:   status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);

768:   /* Create property list for collective dataset write */
769:   plist_id = H5Pcreate(H5P_DATASET_XFER);
770: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
771:   status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
772: #endif
773:   /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

775:   VecGetArray(xin, &x);
776:   status = H5Dwrite(dset_id, H5T_NATIVE_DOUBLE, memspace, filespace, plist_id, x);CHKERRQ(status);
777:   status = H5Fflush(file_id, H5F_SCOPE_GLOBAL);CHKERRQ(status);
778:   VecRestoreArray(xin, &x);

780:   /* Close/release resources */
781:   status = H5Pclose(plist_id);CHKERRQ(status);
782:   status = H5Sclose(filespace);CHKERRQ(status);
783:   status = H5Sclose(memspace);CHKERRQ(status);
784:   status = H5Dclose(dset_id);CHKERRQ(status);
785:   return(0);
786: }
787: #endif

791: PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
792: {
794:   PetscTruth     iascii,isbinary,isdraw;
795: #if defined(PETSC_HAVE_MATHEMATICA)
796:   PetscTruth     ismathematica;
797: #endif
798: #if defined(PETSC_HAVE_NETCDF)
799:   PetscTruth     isnetcdf;
800: #endif
801: #if defined(PETSC_HAVE_HDF4)
802:   PetscTruth     ishdf4;
803: #endif
804: #if defined(PETSC_HAVE_HDF5)
805:   PetscTruth     ishdf5;
806: #endif
807: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
808:   PetscTruth     ismatlab;
809: #endif

812:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
813:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
814:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
815: #if defined(PETSC_HAVE_MATHEMATICA)
816:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATHEMATICA,&ismathematica);
817: #endif
818: #if defined(PETSC_HAVE_NETCDF)
819:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
820: #endif
821: #if defined(PETSC_HAVE_HDF4)
822:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF4,&ishdf4);
823: #endif
824: #if defined(PETSC_HAVE_HDF5)
825:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF5,&ishdf5);
826: #endif
827: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
828:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATLAB,&ismatlab);
829: #endif
830:   if (iascii){
831:     VecView_MPI_ASCII(xin,viewer);
832:   } else if (isbinary) {
833:     VecView_MPI_Binary(xin,viewer);
834:   } else if (isdraw) {
835:     PetscViewerFormat format;

837:     PetscViewerGetFormat(viewer,&format);
838:     if (format == PETSC_VIEWER_DRAW_LG) {
839:       VecView_MPI_Draw_LG(xin,viewer);
840:     } else {
841:       VecView_MPI_Draw(xin,viewer);
842:     }
843: #if defined(PETSC_HAVE_MATHEMATICA)
844:   } else if (ismathematica) {
845:     PetscViewerMathematicaPutVector(viewer,xin);
846: #endif
847: #if defined(PETSC_HAVE_NETCDF)
848:   } else if (isnetcdf) {
849:     VecView_MPI_Netcdf(xin,viewer);
850: #endif
851: #if defined(PETSC_HAVE_HDF4)
852:   } else if (ishdf4) {
853:     VecView_MPI_HDF4(xin,viewer);
854: #endif
855: #if defined(PETSC_HAVE_HDF5)
856:   } else if (ishdf5) {
857:     VecView_MPI_HDF5(xin,viewer);
858: #endif
859: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
860:   } else if (ismatlab) {
861:     VecView_MPI_Matlab(xin,viewer);
862: #endif
863:   } else {
864:     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this object",((PetscObject)viewer)->type_name);
865:   }
866:   return(0);
867: }
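
VecView_MPI() above is the viewer dispatcher for the MPI vector type, so applications reach the format-specific routines in this file simply by pairing VecView() with the matching viewer. A minimal sketch for the ASCII (Matlab format) and binary cases (not part of pdvec.c; error checking is omitted to match this listing, and the file names and vector length are placeholders):

  Vec         x;
  PetscViewer viewer;

  VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);
  VecSet(x,1.0);

  /* ASCII output in Matlab format -> VecView_MPI_ASCII() */
  PetscViewerASCIIOpen(PETSC_COMM_WORLD,"x.m",&viewer);
  PetscViewerSetFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);
  VecView(x,viewer);
  PetscViewerDestroy(viewer);

  /* binary output -> VecView_MPI_Binary() */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,"x.dat",FILE_MODE_WRITE,&viewer);
  VecView(x,viewer);
  PetscViewerDestroy(viewer);

  VecDestroy(x);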

871: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
872: {
874:   *N = xin->map.N;
875:   return(0);
876: }

880: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
881: {
882:   Vec_MPI     *x = (Vec_MPI *)xin->data;
883:   PetscScalar *xx = x->array;
884:   PetscInt    i,tmp,start = xin->map.range[xin->stash.rank];

887:   for (i=0; i<ni; i++) {
888:     if (xin->stash.ignorenegidx == PETSC_TRUE && ix[i] < 0) continue;
889:     tmp = ix[i] - start;
890: #if defined(PETSC_USE_DEBUG)
891:     if (tmp < 0 || tmp >= xin->map.n) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
892: #endif
893:     y[i] = xx[tmp];
894:   }
895:   return(0);
896: }

900: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
901: {
903:   PetscMPIInt    rank = xin->stash.rank;
904:   PetscInt       *owners = xin->map.range,start = owners[rank];
905:   PetscInt       end = owners[rank+1],i,row;
906:   PetscScalar    *xx;

909: #if defined(PETSC_USE_DEBUG)
910:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
911:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
912:   } else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
913:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
914:   }
915: #endif
916:   VecGetArray(xin,&xx);
917:   xin->stash.insertmode = addv;

919:   if (addv == INSERT_VALUES) {
920:     for (i=0; i<ni; i++) {
921:       if (xin->stash.ignorenegidx == PETSC_TRUE && ix[i] < 0) continue;
922: #if defined(PETSC_USE_DEBUG)
923:       if (ix[i] < 0) {
924:         SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
925:       }
926: #endif 
927:       if ((row = ix[i]) >= start && row < end) {
928:         xx[row-start] = y[i];
929:       } else if (!xin->stash.donotstash) {
930: #if defined(PETSC_USE_DEBUG)
931:         if (ix[i] >= xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map.N);
932: #endif
933:         VecStashValue_Private(&xin->stash,row,y[i]);
934:       }
935:     }
936:   } else {
937:     for (i=0; i<ni; i++) {
938:       if (xin->stash.ignorenegidx == PETSC_TRUE && ix[i] < 0) continue;
939: #if defined(PETSC_USE_DEBUG)
940:       if (ix[i] < 0) {
941:         SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
942:       }
943: #endif 
944:       if ((row = ix[i]) >= start && row < end) {
945:         xx[row-start] += y[i];
946:       } else if (!xin->stash.donotstash) {
947: #if defined(PETSC_USE_DEBUG)
948:         if (ix[i] >= xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map.N);
949: #endif        
950:         VecStashValue_Private(&xin->stash,row,y[i]);
951:       }
952:     }
953:   }
954:   VecRestoreArray(xin,&xx);
955:   return(0);
956: }

960: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
961: {
962:   PetscMPIInt    rank = xin->stash.rank;
963:   PetscInt       *owners = xin->map.range,start = owners[rank];
965:   PetscInt       end = owners[rank+1],i,row,bs = xin->map.bs,j;
966:   PetscScalar    *xx,*y = (PetscScalar*)yin;

969:   VecGetArray(xin,&xx);
970: #if defined(PETSC_USE_DEBUG)
971:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
972:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
973:   }
974:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
975:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
976:   }
977: #endif
978:   xin->stash.insertmode = addv;

980:   if (addv == INSERT_VALUES) {
981:     for (i=0; i<ni; i++) {
982:       if ((row = bs*ix[i]) >= start && row < end) {
983:         for (j=0; j<bs; j++) {
984:           xx[row-start+j] = y[j];
985:         }
986:       } else if (!xin->stash.donotstash) {
987:         if (ix[i] < 0) continue;
988: #if defined(PETSC_USE_DEBUG)
989:         if (ix[i] >= xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map.N);
990: #endif
991:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
992:       }
993:       y += bs;
994:     }
995:   } else {
996:     for (i=0; i<ni; i++) {
997:       if ((row = bs*ix[i]) >= start && row < end) {
998:         for (j=0; j<bs; j++) {
999:           xx[row-start+j] += y[j];
1000:         }
1001:       } else if (!xin->stash.donotstash) {
1002:         if (ix[i] < 0) continue;
1003: #if defined(PETSC_USE_DEBUG)
1004:         if (ix[i] >= xin->map.N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map.N);
1005: #endif
1006:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
1007:       }
1008:       y += bs;
1009:     }
1010:   }
1011:   VecRestoreArray(xin,&xx);
1012:   return(0);
1013: }

1015: /*
1016:    Since nsends or nreceives may be zero we add 1 in certain mallocs
1017:    to make sure we never malloc a zero-length buffer.
1018: */
1021: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
1022: {
1024:   PetscInt       *owners = xin->map.range,*bowners,i,bs,nstash,reallocs;
1025:   PetscMPIInt    size;
1026:   InsertMode     addv;
1027:   MPI_Comm       comm = ((PetscObject)xin)->comm;

1030:   if (xin->stash.donotstash) {
1031:     return(0);
1032:   }

1034:   MPI_Allreduce(&xin->stash.insertmode,&addv,1,MPI_INT,MPI_BOR,comm);
1035:   if (addv == (ADD_VALUES|INSERT_VALUES)) {
1036:     SETERRQ(PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
1037:   }
1038:   xin->stash.insertmode = addv; /* in case this processor had no cache */
1039: 
1040:   bs = xin->map.bs;
1041:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
1042:   if (!xin->bstash.bowners && xin->map.bs != -1) {
1043:     PetscMalloc((size+1)*sizeof(PetscInt),&bowners);
1044:     for (i=0; i<size+1; i++){ bowners[i] = owners[i]/bs;}
1045:     xin->bstash.bowners = bowners;
1046:   } else {
1047:     bowners = xin->bstash.bowners;
1048:   }
1049:   VecStashScatterBegin_Private(&xin->stash,owners);
1050:   VecStashScatterBegin_Private(&xin->bstash,bowners);
1051:   VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
1052:   PetscInfo2(xin,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1053:   VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
1054:   PetscInfo2(xin,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);

1056:   return(0);
1057: }

1061: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
1062: {
1064:   PetscInt       base,i,j,*row,flg,bs;
1065:   PetscMPIInt    n;
1066:   PetscScalar    *val,*vv,*array,*xarray;

1069:   if (!vec->stash.donotstash) {
1070:     VecGetArray(vec,&xarray);
1071:     base = vec->map.range[vec->stash.rank];
1072:     bs   = vec->map.bs;

1074:     /* Process the stash */
1075:     while (1) {
1076:       VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
1077:       if (!flg) break;
1078:       if (vec->stash.insertmode == ADD_VALUES) {
1079:         for (i=0; i<n; i++) { xarray[row[i] - base] += val[i]; }
1080:       } else if (vec->stash.insertmode == INSERT_VALUES) {
1081:         for (i=0; i<n; i++) { xarray[row[i] - base] = val[i]; }
1082:       } else {
1083:         SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1084:       }
1085:     }
1086:     VecStashScatterEnd_Private(&vec->stash);

1088:     /* now process the block-stash */
1089:     while (1) {
1090:       VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
1091:       if (!flg) break;
1092:       for (i=0; i<n; i++) {
1093:         array = xarray+row[i]*bs-base;
1094:         vv    = val+i*bs;
1095:         if (vec->stash.insertmode == ADD_VALUES) {
1096:           for (j=0; j<bs; j++) { array[j] += vv[j];}
1097:         } else if (vec->stash.insertmode == INSERT_VALUES) {
1098:           for (j=0; j<bs; j++) { array[j] = vv[j]; }
1099:         } else {
1100:           SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1101:         }
1102:       }
1103:     }
1104:     VecStashScatterEnd_Private(&vec->bstash);
1105:     VecRestoreArray(vec,&xarray);
1106:   }
1107:   vec->stash.insertmode = NOT_SET_VALUES;
1108:   return(0);
1109: }
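
The set-values and assembly routines above implement the usual PETSc pattern: any process may insert or add entries anywhere in the vector, off-process contributions are parked in the stash (or block stash), and the assembly pair communicates and applies them. A minimal sketch of that pattern from the application side (not part of pdvec.c; error checking is omitted and the global size is a placeholder):

  Vec         x;
  PetscInt    row = 0;
  PetscScalar one = 1.0;

  VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);
  /* every process adds into global entry 0, which only one process owns;
     the off-process contributions travel through the stash via
     VecSetValues_MPI(), VecAssemblyBegin_MPI() and VecAssemblyEnd_MPI() */
  VecSetValues(x,1,&row,&one,ADD_VALUES);
  VecAssemblyBegin(x);
  VecAssemblyEnd(x);
  VecDestroy(x);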