Actual source code: pdvec.c

petsc-3.5.4 2015-05-23
  2: /*
  3:      Code for some of the parallel vector primitives.
  4: */
  5: #include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/
  6: #include <petscviewerhdf5.h>

 10: PetscErrorCode VecDestroy_MPI(Vec v)
 11: {
 12:   Vec_MPI        *x = (Vec_MPI*)v->data;

 16: #if defined(PETSC_USE_LOG)
 17:   PetscLogObjectState((PetscObject)v,"Length=%D",v->map->N);
 18: #endif
 19:   if (!x) return(0);
 20:   PetscFree(x->array_allocated);

 22:   /* Destroy local representation of vector if it exists */
 23:   if (x->localrep) {
 24:     VecDestroy(&x->localrep);
 25:     VecScatterDestroy(&x->localupdate);
 26:   }
 27:   /* Destroy the stashes: note the order, so that the tags are freed properly */
 28:   VecStashDestroy_Private(&v->bstash);
 29:   VecStashDestroy_Private(&v->stash);
 30:   PetscFree(v->data);
 31:   return(0);
 32: }
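
VecDestroy_MPI() above is not called directly; it is installed in the vector's function table and reached through the generic VecDestroy(). A minimal usage sketch (not part of pdvec.c; hypothetical standalone program, error checking omitted):

  #include <petscvec.h>

  int main(int argc,char **argv)
  {
    Vec x;

    PetscInitialize(&argc,&argv,NULL,NULL);
    /* create a parallel (VECMPI) vector with global size 100 */
    VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);
    VecSet(x,1.0);
    /* VecDestroy() dispatches to VecDestroy_MPI() for VECMPI vectors */
    VecDestroy(&x);
    PetscFinalize();
    return 0;
  }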

 36: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
 37: {
 38:   PetscErrorCode    ierr;
 39:   PetscInt          i,work = xin->map->n,cnt,len,nLen;
 40:   PetscMPIInt       j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
 41:   MPI_Status        status;
 42:   PetscScalar       *values;
 43:   const PetscScalar *xarray;
 44:   const char        *name;
 45:   PetscViewerFormat format;

 48:   VecGetArrayRead(xin,&xarray);
 49:   /* determine maximum message to arrive */
 50:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
 51:   MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)xin));
 52:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);

 54:   PetscViewerGetFormat(viewer,&format);
 55:   if (!rank) {
 56:     PetscMalloc1(len,&values);
 57:     /*
 58:         MATLAB format and ASCII format are very similar except
 59:         MATLAB uses %18.16e format while ASCII uses %g
 60:     */
 61:     if (format == PETSC_VIEWER_ASCII_MATLAB) {
 62:       PetscObjectGetName((PetscObject)xin,&name);
 63:       PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
 64:       for (i=0; i<xin->map->n; i++) {
 65: #if defined(PETSC_USE_COMPLEX)
 66:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
 67:           PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
 68:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
 69:           PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
 70:         } else {
 71:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(xarray[i]));
 72:         }
 73: #else
 74:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
 75: #endif
 76:       }
 77:       /* receive and print messages */
 78:       for (j=1; j<size; j++) {
 79:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
 80:         MPI_Get_count(&status,MPIU_SCALAR,&n);
 81:         for (i=0; i<n; i++) {
 82: #if defined(PETSC_USE_COMPLEX)
 83:           if (PetscImaginaryPart(values[i]) > 0.0) {
 84:             PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
 85:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
 86:             PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
 87:           } else {
 88:             PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(values[i]));
 89:           }
 90: #else
 91:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
 92: #endif
 93:         }
 94:       }
 95:       PetscViewerASCIIPrintf(viewer,"];\n");

 97:     } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
 98:       for (i=0; i<xin->map->n; i++) {
 99: #if defined(PETSC_USE_COMPLEX)
100:         PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
101: #else
102:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
103: #endif
104:       }
105:       /* receive and print messages */
106:       for (j=1; j<size; j++) {
107:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
108:         MPI_Get_count(&status,MPIU_SCALAR,&n);
109:         for (i=0; i<n; i++) {
110: #if defined(PETSC_USE_COMPLEX)
111:           PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
112: #else
113:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
114: #endif
115:         }
116:       }
117:     } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
118:       /*
119:         state 0: No header has been output
120:         state 1: Only POINT_DATA has been output
121:         state 2: Only CELL_DATA has been output
122:         state 3: Output both, POINT_DATA last
123:         state 4: Output both, CELL_DATA last
124:       */
125:       static PetscInt stateId     = -1;
126:       int             outputState = 0;
127:       int             doOutput    = 0;
128:       PetscBool       hasState;
129:       PetscInt        bs, b;

131:       if (stateId < 0) {
132:         PetscObjectComposedDataRegister(&stateId);
133:       }
134:       PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
135:       if (!hasState) outputState = 0;

137:       PetscObjectGetName((PetscObject)xin,&name);
138:       VecGetLocalSize(xin, &nLen);
139:       PetscMPIIntCast(nLen,&n);
140:       VecGetBlockSize(xin, &bs);
141:       if (format == PETSC_VIEWER_ASCII_VTK) {
142:         if (outputState == 0) {
143:           outputState = 1;
144:           doOutput    = 1;
145:         } else if (outputState == 1) doOutput = 0;
146:         else if (outputState == 2) {
147:           outputState = 3;
148:           doOutput    = 1;
149:         } else if (outputState == 3) doOutput = 0;
150:         else if (outputState == 4) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");

152:         if (doOutput) {
153:           PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map->N/bs);
154:         }
155:       } else {
156:         if (outputState == 0) {
157:           outputState = 2;
158:           doOutput    = 1;
159:         } else if (outputState == 1) {
160:           outputState = 4;
161:           doOutput    = 1;
162:         } else if (outputState == 2) doOutput = 0;
163:         else if (outputState == 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
164:         else if (outputState == 4) doOutput = 0;

166:         if (doOutput) {
167:           PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map->N/bs);
168:         }
169:       }
170:       PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
171:       if (name) {
172:         if (bs == 3) {
173:           PetscViewerASCIIPrintf(viewer, "VECTORS %s double\n", name);
174:         } else {
175:           PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
176:         }
177:       } else {
178:         PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
179:       }
180:       if (bs != 3) {
181:         PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
182:       }
183:       for (i=0; i<n/bs; i++) {
184:         for (b=0; b<bs; b++) {
185:           if (b > 0) {
186:             PetscViewerASCIIPrintf(viewer," ");
187:           }
188:           PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
189:         }
190:         PetscViewerASCIIPrintf(viewer,"\n");
191:       }
192:       for (j=1; j<size; j++) {
193:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
194:         MPI_Get_count(&status,MPIU_SCALAR,&n);
195:         for (i=0; i<n/bs; i++) {
196:           for (b=0; b<bs; b++) {
197:             if (b > 0) {
198:               PetscViewerASCIIPrintf(viewer," ");
199:             }
200:             PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
201:           }
202:           PetscViewerASCIIPrintf(viewer,"\n");
203:         }
204:       }
205:     } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
206:       PetscInt bs, b;

208:       VecGetLocalSize(xin, &nLen);
209:       PetscMPIIntCast(nLen,&n);
210:       VecGetBlockSize(xin, &bs);
211:       if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);

213:       for (i=0; i<n/bs; i++) {
214:         for (b=0; b<bs; b++) {
215:           if (b > 0) {
216:             PetscViewerASCIIPrintf(viewer," ");
217:           }
218:           PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
219:         }
220:         for (b=bs; b<3; b++) {
221:           PetscViewerASCIIPrintf(viewer," 0.0");
222:         }
223:         PetscViewerASCIIPrintf(viewer,"\n");
224:       }
225:       for (j=1; j<size; j++) {
226:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
227:         MPI_Get_count(&status,MPIU_SCALAR,&n);
228:         for (i=0; i<n/bs; i++) {
229:           for (b=0; b<bs; b++) {
230:             if (b > 0) {
231:               PetscViewerASCIIPrintf(viewer," ");
232:             }
233:             PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
234:           }
235:           for (b=bs; b<3; b++) {
236:             PetscViewerASCIIPrintf(viewer," 0.0");
237:           }
238:           PetscViewerASCIIPrintf(viewer,"\n");
239:         }
240:       }
241:     } else if (format == PETSC_VIEWER_ASCII_PCICE) {
242:       PetscInt bs, b, vertexCount = 1;

244:       VecGetLocalSize(xin, &nLen);
245:       PetscMPIIntCast(nLen,&n);
246:       VecGetBlockSize(xin, &bs);
247:       if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);

249:       PetscViewerASCIIPrintf(viewer,"%D\n", xin->map->N/bs);
250:       for (i=0; i<n/bs; i++) {
251:         PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
252:         for (b=0; b<bs; b++) {
253:           if (b > 0) {
254:             PetscViewerASCIIPrintf(viewer," ");
255:           }
256: #if !defined(PETSC_USE_COMPLEX)
257:           PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)xarray[i*bs+b]);
258: #endif
259:         }
260:         PetscViewerASCIIPrintf(viewer,"\n");
261:       }
262:       for (j=1; j<size; j++) {
263:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
264:         MPI_Get_count(&status,MPIU_SCALAR,&n);
265:         for (i=0; i<n/bs; i++) {
266:           PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
267:           for (b=0; b<bs; b++) {
268:             if (b > 0) {
269:               PetscViewerASCIIPrintf(viewer," ");
270:             }
271: #if !defined(PETSC_USE_COMPLEX)
272:             PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)values[i*bs+b]);
273: #endif
274:           }
275:           PetscViewerASCIIPrintf(viewer,"\n");
276:         }
277:       }
278:     } else if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
279:       /* No info */
280:     } else {
281:       if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
282:       cnt = 0;
283:       for (i=0; i<xin->map->n; i++) {
284:         if (format == PETSC_VIEWER_ASCII_INDEX) {
285:           PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
286:         }
287: #if defined(PETSC_USE_COMPLEX)
288:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
289:           PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
290:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
291:           PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
292:         } else {
293:           PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(xarray[i]));
294:         }
295: #else
296:         PetscViewerASCIIPrintf(viewer,"%g\n",(double)xarray[i]);
297: #endif
298:       }
299:       /* receive and print messages */
300:       for (j=1; j<size; j++) {
301:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
302:         MPI_Get_count(&status,MPIU_SCALAR,&n);
303:         if (format != PETSC_VIEWER_ASCII_COMMON) {
304:           PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
305:         }
306:         for (i=0; i<n; i++) {
307:           if (format == PETSC_VIEWER_ASCII_INDEX) {
308:             PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
309:           }
310: #if defined(PETSC_USE_COMPLEX)
311:           if (PetscImaginaryPart(values[i]) > 0.0) {
312:             PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
313:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
314:             PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
315:           } else {
316:             PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(values[i]));
317:           }
318: #else
319:           PetscViewerASCIIPrintf(viewer,"%g\n",(double)values[i]);
320: #endif
321:         }
322:       }
323:     }
324:     PetscFree(values);
325:   } else {
326:     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
327:       /* Rank 0 is not trying to receive anything, so don't send anything */
328:     } else {
329:       if (format == PETSC_VIEWER_ASCII_MATLAB || format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
330:         /* this may be a collective operation so make sure everyone calls it */
331:         PetscObjectGetName((PetscObject)xin,&name);
332:       }
333:       MPI_Send((void*)xarray,xin->map->n,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
334:     }
335:   }
336:   PetscViewerFlush(viewer);
337:   VecRestoreArrayRead(xin,&xarray);
338:   return(0);
339: }
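
The MATLAB branch of VecView_MPI_ASCII() is selected by the viewer format; a sketch of typical usage (hypothetical helper and file name, error checking omitted):

  #include <petscvec.h>

  /* hypothetical helper: write x as a MATLAB-readable script x.m */
  PetscErrorCode ViewAsMatlab(Vec x)
  {
    PetscViewer viewer;

    PetscViewerASCIIOpen(PETSC_COMM_WORLD,"x.m",&viewer);
    PetscViewerSetFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);
    PetscObjectSetName((PetscObject)x,"x");  /* becomes the MATLAB variable name */
    VecView(x,viewer);                       /* rank 0 gathers and prints; others send */
    PetscViewerDestroy(&viewer);
    return 0;
  }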

343: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
344: {
345:   PetscErrorCode    ierr;
346:   PetscMPIInt       rank,size,mesgsize,tag = ((PetscObject)viewer)->tag, mesglen;
347:   PetscInt          len,n = xin->map->n,j,tr[2];
348:   int               fdes;
349:   MPI_Status        status;
350:   PetscScalar       *values;
351:   const PetscScalar *xarray;
352:   FILE              *file;
353: #if defined(PETSC_HAVE_MPIIO)
354:   PetscBool         isMPIIO;
355: #endif
356:   PetscBool         skipHeader;
357:   PetscInt          message_count,flowcontrolcount;
358:   PetscViewerFormat format;

361:   VecGetArrayRead(xin,&xarray);
362:   PetscViewerBinaryGetDescriptor(viewer,&fdes);
363:   PetscViewerBinaryGetSkipHeader(viewer,&skipHeader);

365:   /* determine maximum message to arrive */
366:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
367:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);

369:   if (!skipHeader) {
370:     tr[0] = VEC_FILE_CLASSID;
371:     tr[1] = xin->map->N;
372:     PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_FALSE);
373:   }

375: #if defined(PETSC_HAVE_MPIIO)
376:   PetscViewerBinaryGetMPIIO(viewer,&isMPIIO);
377:   if (!isMPIIO) {
378: #endif
379:     PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);
380:     if (!rank) {
381:       PetscBinaryWrite(fdes,(void*)xarray,xin->map->n,PETSC_SCALAR,PETSC_FALSE);

383:       len = 0;
384:       for (j=1; j<size; j++) len = PetscMax(len,xin->map->range[j+1]-xin->map->range[j]);
385:       PetscMalloc1(len,&values);
386:       PetscMPIIntCast(len,&mesgsize);
387:       /* receive and save messages */
388:       for (j=1; j<size; j++) {
389:         PetscViewerFlowControlStepMaster(viewer,j,&message_count,flowcontrolcount);
390:         MPI_Recv(values,mesgsize,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
391:         MPI_Get_count(&status,MPIU_SCALAR,&mesglen);
392:         n    = (PetscInt)mesglen;
393:         PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_TRUE);
394:       }
395:       PetscViewerFlowControlEndMaster(viewer,&message_count);
396:       PetscFree(values);
397:     } else {
398:       PetscViewerFlowControlStepWorker(viewer,rank,&message_count);
399:       PetscMPIIntCast(xin->map->n,&mesgsize);
400:       MPI_Send((void*)xarray,mesgsize,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
401:       PetscViewerFlowControlEndWorker(viewer,&message_count);
402:     }
403:     PetscViewerGetFormat(viewer,&format);
404:     if (format == PETSC_VIEWER_BINARY_MATLAB) {
405:       MPI_Comm   comm;
406:       FILE       *info;
407:       const char *name;

409:       PetscObjectGetName((PetscObject)xin,&name);
410:       PetscObjectGetComm((PetscObject)viewer,&comm);
411:       PetscViewerBinaryGetInfoPointer(viewer,&info);
412:       PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");
413:       PetscFPrintf(comm,info,"#$$ Set.%s = PetscBinaryRead(fd);\n",name);
414:       PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");
415:     }
416: #if defined(PETSC_HAVE_MPIIO)
417:   } else {
418:     MPI_Offset   off;
419:     MPI_File     mfdes;
420:     PetscMPIInt  lsize;

422:     PetscMPIIntCast(n,&lsize);
423:     PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
424:     PetscViewerBinaryGetMPIIOOffset(viewer,&off);
425:     off += xin->map->rstart*sizeof(PetscScalar); /* off is MPI_Offset, not PetscMPIInt */
426:     MPI_File_set_view(mfdes,off,MPIU_SCALAR,MPIU_SCALAR,(char*)"native",MPI_INFO_NULL);
427:     MPIU_File_write_all(mfdes,(void*)xarray,lsize,MPIU_SCALAR,MPI_STATUS_IGNORE);
428:     PetscViewerBinaryAddMPIIOOffset(viewer,xin->map->N*sizeof(PetscScalar));
429:   }
430: #endif

432:   VecRestoreArrayRead(xin,&xarray);
433:   if (!rank) {
434:     PetscViewerBinaryGetInfoPointer(viewer,&file);
435:     if (file) {
436:       if (((PetscObject)xin)->prefix) {
437:         PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",((PetscObject)xin)->prefix,PetscAbs(xin->map->bs));
438:       } else {
439:         PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",PetscAbs(xin->map->bs));
440:       }
441:     }
442:   }
443:   return(0);
444: }
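
VecView_MPI_Binary() is reached through VecView() with a binary viewer; when PETSc is built with MPI-IO, the collective write path above can be enabled through the viewer's options (see PetscViewerBinaryGetMPIIO()). A sketch (hypothetical helper and file name, error checking omitted):

  #include <petscvec.h>

  /* hypothetical helper: write x to the PETSc binary file vec.dat */
  PetscErrorCode SaveVecBinary(Vec x)
  {
    PetscViewer viewer;

    PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_WRITE,&viewer);
    VecView(x,viewer);            /* dispatches to VecView_MPI_Binary() */
    PetscViewerDestroy(&viewer);
    return 0;
  }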

446: #include <petscdraw.h>
449: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
450: {
451:   PetscDraw      draw;
452:   PetscBool      isnull;

455: #if defined(PETSC_USE_64BIT_INDICES)
457:   PetscViewerDrawGetDraw(viewer,0,&draw);
458:   PetscDrawIsNull(draw,&isnull);
459:   if (isnull) return(0);
460:   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported with 64 bit integers");
461: #else
462:   PetscMPIInt       size,rank;
463:   int               i,N = xin->map->N,*lens;
464:   PetscReal         *xx,*yy;
465:   PetscDrawLG       lg;
466:   const PetscScalar *xarray;

469:   PetscViewerDrawGetDraw(viewer,0,&draw);
470:   PetscDrawIsNull(draw,&isnull);
471:   if (isnull) return(0);

473:   VecGetArrayRead(xin,&xarray);
474:   PetscViewerDrawGetDrawLG(viewer,0,&lg);
475:   PetscDrawCheckResizedWindow(draw);
476:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
477:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
478:   if (!rank) {
479:     PetscDrawLGReset(lg);
480:     PetscMalloc2(N,&xx,N,&yy);
481:     for (i=0; i<N; i++) xx[i] = (PetscReal) i;
482:     PetscMalloc1(size,&lens);
483:     for (i=0; i<size; i++) lens[i] = xin->map->range[i+1] - xin->map->range[i];

485: #if !defined(PETSC_USE_COMPLEX)
486:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
487: #else
488:     {
489:       PetscReal *xr;
490:       PetscMalloc1((xin->map->n+1),&xr);
491:       for (i=0; i<xin->map->n; i++) xr[i] = PetscRealPart(xarray[i]);

493:       MPI_Gatherv(xr,xin->map->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
494:       PetscFree(xr);
495:     }
496: #endif
497:     PetscFree(lens);
498:     PetscDrawLGAddPoints(lg,N,&xx,&yy);
499:     PetscFree2(xx,yy);
500:   } else {
501: #if !defined(PETSC_USE_COMPLEX)
502:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_REAL,0,0,0,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
503: #else
504:     {
505:       PetscReal *xr;
506:       PetscMalloc1((xin->map->n+1),&xr);
507:       for (i=0; i<xin->map->n; i++) xr[i] = PetscRealPart(xarray[i]);

509:       MPI_Gatherv(xr,xin->map->n,MPIU_REAL,0,0,0,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
510:       PetscFree(xr);
511:     }
512: #endif
513:   }
514:   PetscDrawLGDraw(lg);
515:   PetscDrawSynchronizedFlush(draw);
516:   VecRestoreArrayRead(xin,&xarray);
517: #endif
518:   return(0);
519: }

523: PetscErrorCode  VecView_MPI_Draw(Vec xin,PetscViewer viewer)
524: {
525:   PetscErrorCode    ierr;
526:   PetscMPIInt       rank,size,tag = ((PetscObject)viewer)->tag;
527:   PetscInt          i,start,end;
528:   MPI_Status        status;
529:   PetscReal         coors[4],ymin,ymax,xmin,xmax,tmp = 0.0;
530:   PetscDraw         draw;
531:   PetscBool         isnull;
532:   PetscDrawAxis     axis;
533:   const PetscScalar *xarray;

536:   PetscViewerDrawGetDraw(viewer,0,&draw);
537:   PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

539:   VecGetArrayRead(xin,&xarray);
540:   PetscDrawCheckResizedWindow(draw);
541:   xmin = 1.e20; xmax = -1.e20;
542:   for (i=0; i<xin->map->n; i++) {
543: #if defined(PETSC_USE_COMPLEX)
544:     if (PetscRealPart(xarray[i]) < xmin) xmin = PetscRealPart(xarray[i]);
545:     if (PetscRealPart(xarray[i]) > xmax) xmax = PetscRealPart(xarray[i]);
546: #else
547:     if (xarray[i] < xmin) xmin = xarray[i];
548:     if (xarray[i] > xmax) xmax = xarray[i];
549: #endif
550:   }
551:   if (xmin + 1.e-10 > xmax) {
552:     xmin -= 1.e-5;
553:     xmax += 1.e-5;
554:   }
555:   MPI_Reduce(&xmin,&ymin,1,MPIU_REAL,MPIU_MIN,0,PetscObjectComm((PetscObject)xin));
556:   MPI_Reduce(&xmax,&ymax,1,MPIU_REAL,MPIU_MAX,0,PetscObjectComm((PetscObject)xin));
557:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
558:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
559:   PetscDrawAxisCreate(draw,&axis);
560:   PetscLogObjectParent((PetscObject)draw,(PetscObject)axis);
561:   if (!rank) {
562:     PetscDrawClear(draw);
563:     PetscDrawFlush(draw);
564:     PetscDrawAxisSetLimits(axis,0.0,(double)xin->map->N,ymin,ymax);
565:     PetscDrawAxisDraw(axis);
566:     PetscDrawGetCoordinates(draw,coors,coors+1,coors+2,coors+3);
567:   }
568:   PetscDrawAxisDestroy(&axis);
569:   MPI_Bcast(coors,4,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
570:   if (rank) {PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);}
571:   /* draw local part of vector */
572:   VecGetOwnershipRange(xin,&start,&end);
573:   if (rank < size-1) { /* send value to right */
574:     MPI_Send((void*)&xarray[xin->map->n-1],1,MPIU_REAL,rank+1,tag,PetscObjectComm((PetscObject)xin));
575:   }
576:   for (i=1; i<xin->map->n; i++) {
577: #if !defined(PETSC_USE_COMPLEX)
578:     PetscDrawLine(draw,(PetscReal)(i-1+start),xarray[i-1],(PetscReal)(i+start),xarray[i],PETSC_DRAW_RED);
579: #else
580:     PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),PetscRealPart(xarray[i]),PETSC_DRAW_RED);
581: #endif
582:   }
583:   if (rank) { /* receive value from left */
584:     MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,PetscObjectComm((PetscObject)xin),&status);
585: #if !defined(PETSC_USE_COMPLEX)
586:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,xarray[0],PETSC_DRAW_RED);
587: #else
588:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
589: #endif
590:   }
591:   PetscDrawSynchronizedFlush(draw);
592:   PetscDrawPause(draw);
593:   VecRestoreArrayRead(xin,&xarray);
594:   return(0);
595: }
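
The two draw routines above are selected by the viewer format (PETSC_VIEWER_DRAW_LG for the line graph, the default otherwise); a sketch (hypothetical helper, error checking omitted, requires a graphics-enabled build):

  #include <petscvec.h>

  /* hypothetical helper: display x in an X window, then as a line graph */
  PetscErrorCode ShowVec(Vec x)
  {
    PetscViewer viewer = PETSC_VIEWER_DRAW_WORLD;

    VecView(x,viewer);                                  /* VecView_MPI_Draw()    */
    PetscViewerSetFormat(viewer,PETSC_VIEWER_DRAW_LG);
    VecView(x,viewer);                                  /* VecView_MPI_Draw_LG() */
    return 0;
  }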

597: #if defined(PETSC_HAVE_MATLAB_ENGINE)
600: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
601: {
602:   PetscErrorCode    ierr;
603:   PetscMPIInt       rank,size,*lens;
604:   PetscInt          i,N = xin->map->N;
605:   const PetscScalar *xarray;
606:   PetscScalar       *xx;

609:   VecGetArrayRead(xin,&xarray);
610:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
611:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
612:   if (!rank) {
613:     PetscMalloc1(N,&xx);
614:     PetscMalloc1(size,&lens);
615:     for (i=0; i<size; i++) lens[i] = xin->map->range[i+1] - xin->map->range[i];

617:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
618:     PetscFree(lens);

620:     PetscObjectName((PetscObject)xin);
621:     PetscViewerMatlabPutArray(viewer,N,1,xx,((PetscObject)xin)->name);

623:     PetscFree(xx);
624:   } else {
625:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
626:   }
627:   VecRestoreArrayRead(xin,&xarray);
628:   return(0);
629: }
630: #endif

632: #if defined(PETSC_HAVE_HDF5)
635: PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
636: {
637:   /* TODO: It looks like we can remove the H5Sclose(filespace) and H5Dget_space(dset_id). Why do we do this? */
638:   hid_t             filespace; /* file dataspace identifier */
639:   hid_t             chunkspace; /* chunk dataset property identifier */
640:   hid_t             plist_id;  /* property list identifier */
641:   hid_t             dset_id;   /* dataset identifier */
642:   hid_t             memspace;  /* memory dataspace identifier */
643:   hid_t             file_id;
644:   hid_t             group;
645:   hid_t             scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
646:   herr_t            status;
647:   PetscInt          bs = PetscAbs(xin->map->bs);
648:   hsize_t           dim;
649:   hsize_t           maxDims[4], dims[4], chunkDims[4], count[4],offset[4];
650:   PetscInt          timestep;
651:   PetscInt          low;
652:   const PetscScalar *x;
653:   const char        *vecname;
654:   PetscErrorCode    ierr;

657:   PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
658:   PetscViewerHDF5GetTimestep(viewer, &timestep);

660:   /* Create the dataspace for the dataset.
661:    *
662:    * dims - holds the current dimensions of the dataset
663:    *
664:    * maxDims - holds the maximum dimensions of the dataset (unlimited
665:    * for the number of time steps with the current dimensions for the
666:    * other dimensions; so only additional time steps can be added).
667:    *
668:    * chunkDims - holds the size of a single time step (required to
669:    * permit extending dataset).
670:    */
671:   dim = 0;
672:   if (timestep >= 0) {
673:     dims[dim]      = timestep+1;
674:     maxDims[dim]   = H5S_UNLIMITED;
675:     chunkDims[dim] = 1;
676:     ++dim;
677:   }
678:   PetscHDF5IntCast(xin->map->N/bs,dims + dim);

680:   maxDims[dim]   = dims[dim];
681:   chunkDims[dim] = dims[dim];
682:   ++dim;
683:   if (bs >= 1) {
684:     dims[dim]      = bs;
685:     maxDims[dim]   = dims[dim];
686:     chunkDims[dim] = dims[dim];
687:     ++dim;
688:   }
689: #if defined(PETSC_USE_COMPLEX)
690:   dims[dim]      = 2;
691:   maxDims[dim]   = dims[dim];
692:   chunkDims[dim] = dims[dim];
693:   ++dim;
694: #endif
695:   filespace = H5Screate_simple(dim, dims, maxDims);
696:   if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate_simple()");

698: #if defined(PETSC_USE_REAL_SINGLE)
699:   scalartype = H5T_NATIVE_FLOAT;
700: #elif defined(PETSC_USE_REAL___FLOAT128)
701: #error "HDF5 output with 128 bit floats not supported."
702: #else
703:   scalartype = H5T_NATIVE_DOUBLE;
704: #endif

706:   /* Create the dataset with default properties and close filespace */
707:   PetscObjectGetName((PetscObject) xin, &vecname);
708:   if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
709:     /* Create chunk */
710:     chunkspace = H5Pcreate(H5P_DATASET_CREATE);
711:     if (chunkspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Pcreate()");
712:     status = H5Pset_chunk(chunkspace, dim, chunkDims);CHKERRQ(status);

714: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
715:     dset_id = H5Dcreate2(group, vecname, scalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT);
716: #else
717:     dset_id = H5Dcreate(group, vecname, scalartype, filespace, H5P_DEFAULT);
718: #endif
719:     if (dset_id == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Dcreate2()");
720:     status = H5Pclose(chunkspace);CHKERRQ(status);
721:   } else {
722:     dset_id = H5Dopen2(group, vecname, H5P_DEFAULT);
723:     status  = H5Dset_extent(dset_id, dims);CHKERRQ(status);
724:   }
725:   status = H5Sclose(filespace);CHKERRQ(status);

727:   /* Each process defines a dataset and writes it to the hyperslab in the file */
728:   dim = 0;
729:   if (timestep >= 0) {
730:     count[dim] = 1;
731:     ++dim;
732:   }
733:   PetscHDF5IntCast(xin->map->n/bs,count + dim);
734:   ++dim;
735:   if (bs >= 1) {
736:     count[dim] = bs;
737:     ++dim;
738:   }
739: #if defined(PETSC_USE_COMPLEX)
740:   count[dim] = 2;
741:   ++dim;
742: #endif
743:   if (xin->map->n > 0) {
744:     memspace = H5Screate_simple(dim, count, NULL);
745:     if (memspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate_simple()");
746:   } else {
747:     /* Can't create dataspace with zero for any dimension, so create null dataspace. */
748:     memspace = H5Screate(H5S_NULL);
749:     if (memspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate()");
750:   }

752:   /* Select hyperslab in the file */
753:   VecGetOwnershipRange(xin, &low, NULL);
754:   dim  = 0;
755:   if (timestep >= 0) {
756:     offset[dim] = timestep;
757:     ++dim;
758:   }
759:   PetscHDF5IntCast(low/bs,offset + dim);
760:   ++dim;
761:   if (bs >= 1) {
762:     offset[dim] = 0;
763:     ++dim;
764:   }
765: #if defined(PETSC_USE_COMPLEX)
766:   offset[dim] = 0;
767:   ++dim;
768: #endif
769:   if (xin->map->n > 0) {
770:     filespace = H5Dget_space(dset_id);
771:     if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Dget_space()");
772:     status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);
773:   } else {
774:     /* Create null filespace to match null memspace. */
775:     filespace = H5Screate(H5S_NULL);
776:     if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate(H5S_NULL)");
777:   }

779:   /* Create property list for collective dataset write */
780:   plist_id = H5Pcreate(H5P_DATASET_XFER);
781:   if (plist_id == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Pcreate()");
782: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
783:   status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
784: #endif
785:   /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

787:   VecGetArrayRead(xin, &x);
788:   status = H5Dwrite(dset_id, scalartype, memspace, filespace, plist_id, x);CHKERRQ(status);
789:   status = H5Fflush(file_id, H5F_SCOPE_GLOBAL);CHKERRQ(status);
790:   VecRestoreArrayRead(xin, &x);

792:   /* Close/release resources */
793:   if (group != file_id) {
794:     status = H5Gclose(group);CHKERRQ(status);
795:   }
796:   status = H5Pclose(plist_id);CHKERRQ(status);
797:   status = H5Sclose(filespace);CHKERRQ(status);
798:   status = H5Sclose(memspace);CHKERRQ(status);
799:   status = H5Dclose(dset_id);CHKERRQ(status);
800:   PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
801:   return(0);
802: }
803: #endif
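
When PETSc is configured with HDF5, VecView_MPI_HDF5() is reached through VecView() with an HDF5 viewer; the dataset name is taken from the object name. A sketch (hypothetical helper and file name, error checking omitted):

  #include <petscvec.h>
  #include <petscviewerhdf5.h>

  /* hypothetical helper: write x as dataset "x" in vec.h5 */
  PetscErrorCode SaveVecHDF5(Vec x)
  {
    PetscViewer viewer;

    PetscViewerHDF5Open(PETSC_COMM_WORLD,"vec.h5",FILE_MODE_WRITE,&viewer);
    PetscObjectSetName((PetscObject)x,"x");  /* used as the HDF5 dataset name */
    VecView(x,viewer);                       /* dispatches to VecView_MPI_HDF5() */
    PetscViewerDestroy(&viewer);
    return 0;
  }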

807: PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
808: {
810:   PetscBool      iascii,isbinary,isdraw;
811: #if defined(PETSC_HAVE_MATHEMATICA)
812:   PetscBool      ismathematica;
813: #endif
814: #if defined(PETSC_HAVE_HDF5)
815:   PetscBool      ishdf5;
816: #endif
817: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
818:   PetscBool      ismatlab;
819: #endif

822:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
823:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
824:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
825: #if defined(PETSC_HAVE_MATHEMATICA)
826:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATHEMATICA,&ismathematica);
827: #endif
828: #if defined(PETSC_HAVE_HDF5)
829:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
830: #endif
831: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
832:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
833: #endif
834:   if (iascii) {
835:     VecView_MPI_ASCII(xin,viewer);
836:   } else if (isbinary) {
837:     VecView_MPI_Binary(xin,viewer);
838:   } else if (isdraw) {
839:     PetscViewerFormat format;

841:     PetscViewerGetFormat(viewer,&format);
842:     if (format == PETSC_VIEWER_DRAW_LG) {
843:       VecView_MPI_Draw_LG(xin,viewer);
844:     } else {
845:       VecView_MPI_Draw(xin,viewer);
846:     }
847: #if defined(PETSC_HAVE_MATHEMATICA)
848:   } else if (ismathematica) {
849:     PetscViewerMathematicaPutVector(viewer,xin);
850: #endif
851: #if defined(PETSC_HAVE_HDF5)
852:   } else if (ishdf5) {
853:     VecView_MPI_HDF5(xin,viewer);
854: #endif
855: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
856:   } else if (ismatlab) {
857:     VecView_MPI_Matlab(xin,viewer);
858: #endif
859:   }
860:   return(0);
861: }
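
VecView_MPI() simply dispatches on the viewer type (and, for draw viewers, on the format), so the same VecView() call reaches different routines above. A sketch (hypothetical helper, error checking omitted):

  #include <petscvec.h>

  /* hypothetical helper: the viewer type selects the implementation */
  PetscErrorCode ViewVec(Vec x)
  {
    VecView(x,PETSC_VIEWER_STDOUT_WORLD);  /* iascii -> VecView_MPI_ASCII() */
    VecView(x,PETSC_VIEWER_DRAW_WORLD);    /* isdraw -> VecView_MPI_Draw()  */
    return 0;
  }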

865: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
866: {
868:   *N = xin->map->N;
869:   return(0);
870: }

874: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
875: {
876:   const PetscScalar *xx;
877:   PetscInt          i,tmp,start = xin->map->range[xin->stash.rank];
878:   PetscErrorCode    ierr;

881:   VecGetArrayRead(xin,&xx);
882:   for (i=0; i<ni; i++) {
883:     if (xin->stash.ignorenegidx && ix[i] < 0) continue;
884:     tmp = ix[i] - start;
885: #if defined(PETSC_USE_DEBUG)
886:     if (tmp < 0 || tmp >= xin->map->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
887: #endif
888:     y[i] = xx[tmp];
889:   }
890:   VecRestoreArrayRead(xin,&xx);
891:   return(0);
892: }
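
As the debug check above enforces, VecGetValues() on a VECMPI vector can only return locally owned entries; a sketch (hypothetical helper, error checking omitted):

  #include <petscvec.h>

  /* hypothetical helper: read the first locally owned entry of x */
  PetscErrorCode GetFirstLocal(Vec x,PetscScalar *val)
  {
    PetscInt start,end;

    VecGetOwnershipRange(x,&start,&end);
    if (end > start) {
      VecGetValues(x,1,&start,val);  /* indices must lie in [start,end) */
    }
    return 0;
  }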

896: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
897: {
899:   PetscMPIInt    rank    = xin->stash.rank;
900:   PetscInt       *owners = xin->map->range,start = owners[rank];
901:   PetscInt       end     = owners[rank+1],i,row;
902:   PetscScalar    *xx;

905: #if defined(PETSC_USE_DEBUG)
906:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
907:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
908: #endif
909:   VecGetArray(xin,&xx);
910:   xin->stash.insertmode = addv;

912:   if (addv == INSERT_VALUES) {
913:     for (i=0; i<ni; i++) {
914:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
915: #if defined(PETSC_USE_DEBUG)
916:       if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
917: #endif
918:       if ((row = ix[i]) >= start && row < end) {
919:         xx[row-start] = y[i];
920:       } else if (!xin->stash.donotstash) {
921: #if defined(PETSC_USE_DEBUG)
922:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
923: #endif
924:         VecStashValue_Private(&xin->stash,row,y[i]);
925:       }
926:     }
927:   } else {
928:     for (i=0; i<ni; i++) {
929:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
930: #if defined(PETSC_USE_DEBUG)
931:       if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
932: #endif
933:       if ((row = ix[i]) >= start && row < end) {
934:         xx[row-start] += y[i];
935:       } else if (!xin->stash.donotstash) {
936: #if defined(PETSC_USE_DEBUG)
937:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
938: #endif
939:         VecStashValue_Private(&xin->stash,row,y[i]);
940:       }
941:     }
942:   }
943:   VecRestoreArray(xin,&xx);
944:   return(0);
945: }
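
Entries owned by another process are stashed by VecStashValue_Private() and only communicated during assembly, so VecSetValues() must be followed by VecAssemblyBegin()/VecAssemblyEnd(). A sketch (hypothetical helper, error checking omitted):

  #include <petscvec.h>

  /* hypothetical helper: every rank adds 1.0 to global entry 0;
     ranks that do not own entry 0 stash the value until assembly */
  PetscErrorCode AddToEntryZero(Vec x)
  {
    PetscInt    i = 0;
    PetscScalar v = 1.0;

    VecSetValues(x,1,&i,&v,ADD_VALUES);
    VecAssemblyBegin(x);  /* VecAssemblyBegin_MPI(): scatter the stashes   */
    VecAssemblyEnd(x);    /* VecAssemblyEnd_MPI():   apply received values */
    return 0;
  }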

949: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
950: {
951:   PetscMPIInt    rank    = xin->stash.rank;
952:   PetscInt       *owners = xin->map->range,start = owners[rank];
954:   PetscInt       end = owners[rank+1],i,row,bs = PetscAbs(xin->map->bs),j;
955:   PetscScalar    *xx,*y = (PetscScalar*)yin;

958:   VecGetArray(xin,&xx);
959: #if defined(PETSC_USE_DEBUG)
960:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
961:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
962: #endif
963:   xin->stash.insertmode = addv;

965:   if (addv == INSERT_VALUES) {
966:     for (i=0; i<ni; i++) {
967:       if ((row = bs*ix[i]) >= start && row < end) {
968:         for (j=0; j<bs; j++) xx[row-start+j] = y[j];
969:       } else if (!xin->stash.donotstash) {
970:         if (ix[i] < 0) continue;
971: #if defined(PETSC_USE_DEBUG)
972:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
973: #endif
974:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
975:       }
976:       y += bs;
977:     }
978:   } else {
979:     for (i=0; i<ni; i++) {
980:       if ((row = bs*ix[i]) >= start && row < end) {
981:         for (j=0; j<bs; j++) xx[row-start+j] += y[j];
982:       } else if (!xin->stash.donotstash) {
983:         if (ix[i] < 0) continue;
984: #if defined(PETSC_USE_DEBUG)
985:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
986: #endif
987:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
988:       }
989:       y += bs;
990:     }
991:   }
992:   VecRestoreArray(xin,&xx);
993:   return(0);
994: }
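
VecSetValuesBlocked() addresses the vector by block index and stores whole blocks in the block stash; a sketch assuming the vector has block size 2 (hypothetical helper, error checking omitted):

  #include <petscvec.h>

  /* hypothetical helper: insert one block of size 2 at block index b;
     assumes the block size was set earlier, e.g. with VecSetBlockSize(x,2) */
  PetscErrorCode SetOneBlock(Vec x,PetscInt b)
  {
    PetscScalar vals[2] = {1.0,2.0};

    VecSetValuesBlocked(x,1,&b,vals,INSERT_VALUES);
    VecAssemblyBegin(x);
    VecAssemblyEnd(x);
    return 0;
  }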

996: /*
997:    Since nsends or nreceives may be zero we add 1 in certain mallocs
998:    to make sure we never malloc an empty one.
999: */
1002: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
1003: {
1005:   PetscInt       *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
1006:   PetscMPIInt    size;
1007:   InsertMode     addv;
1008:   MPI_Comm       comm;

1011:   PetscObjectGetComm((PetscObject)xin,&comm);
1012:   if (xin->stash.donotstash) return(0);

1014:   MPI_Allreduce((PetscEnum*)&xin->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);
1015:   if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
1016:   xin->stash.insertmode = addv; /* in case this processor had no cache */

1018:   VecGetBlockSize(xin,&bs);
1019:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
1020:   if (!xin->bstash.bowners && xin->map->bs != -1) {
1021:     PetscMalloc1((size+1),&bowners);
1022:     for (i=0; i<size+1; i++) bowners[i] = owners[i]/bs;
1023:     xin->bstash.bowners = bowners;
1024:   } else bowners = xin->bstash.bowners;

1026:   VecStashScatterBegin_Private(&xin->stash,owners);
1027:   VecStashScatterBegin_Private(&xin->bstash,bowners);
1028:   VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
1029:   PetscInfo2(xin,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1030:   VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
1031:   PetscInfo2(xin,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1032:   return(0);
1033: }

1037: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
1038: {
1040:   PetscInt       base,i,j,*row,flg,bs;
1041:   PetscMPIInt    n;
1042:   PetscScalar    *val,*vv,*array,*xarray;

1045:   if (!vec->stash.donotstash) {
1046:     VecGetArray(vec,&xarray);
1047:     VecGetBlockSize(vec,&bs);
1048:     base = vec->map->range[vec->stash.rank];

1050:     /* Process the stash */
1051:     while (1) {
1052:       VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
1053:       if (!flg) break;
1054:       if (vec->stash.insertmode == ADD_VALUES) {
1055:         for (i=0; i<n; i++) xarray[row[i] - base] += val[i];
1056:       } else if (vec->stash.insertmode == INSERT_VALUES) {
1057:         for (i=0; i<n; i++) xarray[row[i] - base] = val[i];
1058:       } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1059:     }
1060:     VecStashScatterEnd_Private(&vec->stash);

1062:     /* now process the block-stash */
1063:     while (1) {
1064:       VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
1065:       if (!flg) break;
1066:       for (i=0; i<n; i++) {
1067:         array = xarray+row[i]*bs-base;
1068:         vv    = val+i*bs;
1069:         if (vec->stash.insertmode == ADD_VALUES) {
1070:           for (j=0; j<bs; j++) array[j] += vv[j];
1071:         } else if (vec->stash.insertmode == INSERT_VALUES) {
1072:           for (j=0; j<bs; j++) array[j] = vv[j];
1073:         } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1074:       }
1075:     }
1076:     VecStashScatterEnd_Private(&vec->bstash);
1077:     VecRestoreArray(vec,&xarray);
1078:   }
1079:   vec->stash.insertmode = NOT_SET_VALUES;
1080:   return(0);
1081: }
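
As with matrix assembly, the begin/end split lets the stash communication started in VecAssemblyBegin_MPI() be overlapped with computation that does not touch the vector; a usage sketch (hypothetical helper):

  #include <petscvec.h>

  /* hypothetical pattern: overlap stash communication with unrelated work */
  PetscErrorCode AssembleWithOverlap(Vec x)
  {
    VecAssemblyBegin(x);  /* starts scattering stashed values     */
    /* ... local computation that does not access x ... */
    VecAssemblyEnd(x);    /* receives and applies stashed values  */
    return 0;
  }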