Actual source code: pdvec.c
1: /* $Id: pdvec.c,v 1.148 2001/04/10 19:34:57 bsmith Exp $*/
2: /*
3: Code for some of the parallel vector primitives.
4: */
5: #include "src/vec/impls/mpi/pvecimpl.h" /*I "petscvec.h" I*/
7: int VecGetOwnershipRange_MPI(Vec v,int *low,int* high)
8: {
10: *low = v->map->rstart;
11: *high = v->map->rend;
12: return(0);
13: }
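/*
   A minimal caller-side sketch (assumed usage, not part of pdvec.c): the public
   VecGetOwnershipRange() ends up in the routine above for MPI vectors, and the
   usual pattern is to touch only the contiguous slice [low,high) that this
   process owns.

      int    i,low,high,ierr;
      Scalar one = 1.0;
      ierr = VecGetOwnershipRange(x,&low,&high);CHKERRQ(ierr);
      for (i=low; i<high; i++) {
        ierr = VecSetValues(x,1,&i,&one,INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
*/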
15: int VecDestroy_MPI(Vec v)
16: {
17: Vec_MPI *x = (Vec_MPI*)v->data;
18: int ierr;
22: /* if memory was published with AMS then destroy it */
23: PetscObjectDepublish(v);
25: #if defined(PETSC_USE_LOG)
26: PetscLogObjectState((PetscObject)v,"Length=%d",v->N);
27: #endif
28: if (x->array_allocated) {PetscFree(x->array_allocated);}
30: /* Destroy local representation of vector if it exists */
31: if (x->localrep) {
32: VecDestroy(x->localrep);
33: if (x->localupdate) {VecScatterDestroy(x->localupdate);}
34: }
35: /* Destroy the stashes: note the order - so that the tags are freed properly */
36: VecStashDestroy_Private(&v->bstash);
37: VecStashDestroy_Private(&v->stash);
38: if (x->browners) {PetscFree(x->browners);}
39: PetscFree(x);
40: return(0);
41: }
43: int VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
44: {
45: Vec_MPI *x = (Vec_MPI*)xin->data;
46: int i,rank,len,work = xin->n,n,j,size,ierr,cnt,tag = ((PetscObject)viewer)->tag;
47: MPI_Status status;
48: Scalar *values;
49: char *name;
50: PetscViewerFormat format;
53: /* determine the maximum message length that will arrive */
54: MPI_Comm_rank(xin->comm,&rank);
55: MPI_Reduce(&work,&len,1,MPI_INT,MPI_MAX,0,xin->comm);
56: MPI_Comm_size(xin->comm,&size);
58: if (!rank) {
59: PetscMalloc((len+1)*sizeof(Scalar),&values);
60: PetscViewerGetFormat(viewer,&format);
61: /*
62: Matlab format and ASCII format are very similar except
63: Matlab uses %18.16e format while ASCII uses %g
64: */
65: if (format == PETSC_VIEWER_ASCII_MATLAB) {
66: PetscObjectGetName((PetscObject)viewer,&name);
67: PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
68: for (i=0; i<xin->n; i++) {
69: #if defined(PETSC_USE_COMPLEX)
70: if (PetscImaginaryPart(x->array[i]) > 0.0) {
71: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(x->array[i]),PetscImaginaryPart(x->array[i]));
72: } else if (PetscImaginaryPart(x->array[i]) < 0.0) {
73: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(x->array[i]),-PetscImaginaryPart(x->array[i]));
74: } else {
75: PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(x->array[i]));
76: }
77: #else
78: PetscViewerASCIIPrintf(viewer,"%18.16e\n",x->array[i]);
79: #endif
80: }
81: /* receive and print messages */
82: for (j=1; j<size; j++) {
83: MPI_Recv(values,len,MPIU_SCALAR,j,tag,xin->comm,&status);
84: MPI_Get_count(&status,MPIU_SCALAR,&n);
85: for (i=0; i<n; i++) {
86: #if defined(PETSC_USE_COMPLEX)
87: if (PetscImaginaryPart(values[i]) > 0.0) {
88: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16e i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
89: } else if (PetscImaginaryPart(values[i]) < 0.0) {
90: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16e i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
91: } else {
92: PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(values[i]));
93: }
94: #else
95: PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
96: #endif
97: }
98: }
99: PetscViewerASCIIPrintf(viewer,"];\n");
101: } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
102: for (i=0; i<xin->n; i++) {
103: #if defined(PETSC_USE_COMPLEX)
104: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(x->array[i]),PetscImaginaryPart(x->array[i]));
105: #else
106: PetscViewerASCIIPrintf(viewer,"%18.16e\n",x->array[i]);
107: #endif
108: }
109: /* receive and print messages */
110: for (j=1; j<size; j++) {
111: MPI_Recv(values,len,MPIU_SCALAR,j,tag,xin->comm,&status);
112: MPI_Get_count(&status,MPIU_SCALAR,&n);
113: for (i=0; i<n; i++) {
114: #if defined(PETSC_USE_COMPLEX)
115: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
116: #else
117: PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
118: #endif
119: }
120: }
122: } else {
123: if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Processor [%d]\n",rank);}
124: cnt = 0;
125: for (i=0; i<xin->n; i++) {
126: if (format == PETSC_VIEWER_ASCII_INDEX) {
127: PetscViewerASCIIPrintf(viewer,"%d: ",cnt++);
128: }
129: #if defined(PETSC_USE_COMPLEX)
130: if (PetscImaginaryPart(x->array[i]) > 0.0) {
131: PetscViewerASCIIPrintf(viewer,"%g + %g i\n",PetscRealPart(x->array[i]),PetscImaginaryPart(x->array[i]));
132: } else if (PetscImaginaryPart(x->array[i]) < 0.0) {
133: PetscViewerASCIIPrintf(viewer,"%g - %g i\n",PetscRealPart(x->array[i]),-PetscImaginaryPart(x->array[i]));
134: } else {
135: PetscViewerASCIIPrintf(viewer,"%g\n",PetscRealPart(x->array[i]));
136: }
137: #else
138: PetscViewerASCIIPrintf(viewer,"%g\n",x->array[i]);
139: #endif
140: }
141: /* receive and print messages */
142: for (j=1; j<size; j++) {
143: MPI_Recv(values,len,MPIU_SCALAR,j,tag,xin->comm,&status);
144: MPI_Get_count(&status,MPIU_SCALAR,&n);
145: if (format != PETSC_VIEWER_ASCII_COMMON) {
146: PetscViewerASCIIPrintf(viewer,"Processor [%d]\n",j);
147: }
148: for (i=0; i<n; i++) {
149: if (format == PETSC_VIEWER_ASCII_INDEX) {
150: PetscViewerASCIIPrintf(viewer,"%d: ",cnt++);
151: }
152: #if defined(PETSC_USE_COMPLEX)
153: if (PetscImaginaryPart(values[i]) > 0.0) {
154: PetscViewerASCIIPrintf(viewer,"%g + %g i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
155: } else if (PetscImaginaryPart(values[i]) < 0.0) {
156: PetscViewerASCIIPrintf(viewer,"%g - %g i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
157: } else {
158: PetscViewerASCIIPrintf(viewer,"%g\n",PetscRealPart(values[i]));
159: }
160: #else
161: PetscViewerASCIIPrintf(viewer,"%g\n",values[i]);
162: #endif
163: }
164: }
165: }
166: PetscFree(values);
167: } else {
168: /* send values */
169: MPI_Send(x->array,xin->n,MPIU_SCALAR,0,tag,xin->comm);
170: }
171: PetscViewerFlush(viewer);
172: return(0);
173: }
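/*
   Illustrative usage sketch for the ASCII path above (assumed caller code, not
   part of pdvec.c): VecView() on an ASCII viewer funnels every processor's
   values to rank 0, which prints them.  Selecting PETSC_VIEWER_ASCII_MATLAB on
   the viewer (via PetscViewerSetFormat(), whose exact argument list has varied
   across PETSc releases) switches from the %g output to the %18.16e
   Matlab-style output.

      ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
*/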
175: int VecView_MPI_Binary(Vec xin,PetscViewer viewer)
176: {
177: Vec_MPI *x = (Vec_MPI*)xin->data;
178: int rank,ierr,len,work = xin->n,n,j,size,fdes,tag = ((PetscObject)viewer)->tag;
179: MPI_Status status;
180: Scalar *values;
181: FILE *file;
184: PetscViewerBinaryGetDescriptor(viewer,&fdes);
186: /* determine the maximum message length that will arrive */
187: MPI_Comm_rank(xin->comm,&rank);
188: MPI_Reduce(&work,&len,1,MPI_INT,MPI_MAX,0,xin->comm);
189: MPI_Comm_size(xin->comm,&size);
191: if (!rank) {
192: PetscBinaryWrite(fdes,&xin->cookie,1,PETSC_INT,0);
193: PetscBinaryWrite(fdes,&xin->N,1,PETSC_INT,0);
194: PetscBinaryWrite(fdes,x->array,xin->n,PETSC_SCALAR,0);
196: PetscMalloc((len+1)*sizeof(Scalar),&values);
197: /* receive and print messages */
198: for (j=1; j<size; j++) {
199: MPI_Recv(values,len,MPIU_SCALAR,j,tag,xin->comm,&status);
200: MPI_Get_count(&status,MPIU_SCALAR,&n);
201: PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,0);
202: }
203: PetscFree(values);
204: PetscViewerBinaryGetInfoPointer(viewer,&file);
205: if (file && xin->bs > 1) {
206: fprintf(file,"-vecload_block_size %d\n",xin->bs);
207: }
208: } else {
209: /* send values */
210: MPI_Send(x->array,xin->n,MPIU_SCALAR,0,tag,xin->comm);
211: }
212: return(0);
213: }
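/*
   A hedged save/load sketch for the binary path above (assumed caller code, not
   part of pdvec.c; the PetscViewerBinaryOpen() mode constants and the VecLoad()
   signature shown follow the PETSc 2.x conventions of this era and may differ
   in other releases).  Rank 0 writes the cookie, the global size and its own
   values, then appends each other processor's values as they arrive; the info
   file records -vecload_block_size so a later load can restore the block size.

      PetscViewer viewer;
      Vec         y;
      ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",PETSC_BINARY_CREATE,&viewer);CHKERRQ(ierr);
      ierr = VecView(x,viewer);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);

      ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",PETSC_BINARY_RDONLY,&viewer);CHKERRQ(ierr);
      ierr = VecLoad(viewer,&y);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);
*/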
215: int VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
216: {
217: Vec_MPI *x = (Vec_MPI*)xin->data;
218: int i,rank,size,N = xin->N,*lens,ierr;
219: PetscDraw draw;
220: PetscReal *xx,*yy;
221: PetscDrawLG lg;
222: PetscTruth isnull;
225: PetscViewerDrawGetDraw(viewer,0,&draw);
226: PetscDrawIsNull(draw,&isnull);
227: if (isnull) return(0);
229: PetscViewerDrawGetDrawLG(viewer,0,&lg);
230: PetscDrawCheckResizedWindow(draw);
231: MPI_Comm_rank(xin->comm,&rank);
232: MPI_Comm_size(xin->comm,&size);
233: if (!rank) {
234: PetscDrawLGReset(lg);
235: PetscMalloc(2*(N+1)*sizeof(PetscReal),&xx);
236: for (i=0; i<N; i++) {xx[i] = (PetscReal) i;}
237: yy = xx + N;
238: PetscMalloc(size*sizeof(int),&lens);
239: for (i=0; i<size; i++) {
240: lens[i] = xin->map->range[i+1] - xin->map->range[i];
241: }
242: #if !defined(PETSC_USE_COMPLEX)
243: MPI_Gatherv(x->array,xin->n,MPI_DOUBLE,yy,lens,xin->map->range,MPI_DOUBLE,0,xin->comm);
244: #else
245: {
246: PetscReal *xr;
247: PetscMalloc((xin->n+1)*sizeof(PetscReal),&xr);
248: for (i=0; i<xin->n; i++) {
249: xr[i] = PetscRealPart(x->array[i]);
250: }
251: MPI_Gatherv(xr,xin->n,MPI_DOUBLE,yy,lens,xin->map->range,MPI_DOUBLE,0,xin->comm);
252: PetscFree(xr);
253: }
254: #endif
255: PetscFree(lens);
256: PetscDrawLGAddPoints(lg,N,&xx,&yy);
257: PetscFree(xx);
258: } else {
259: #if !defined(PETSC_USE_COMPLEX)
260: MPI_Gatherv(x->array,xin->n,MPI_DOUBLE,0,0,0,MPI_DOUBLE,0,xin->comm);
261: #else
262: {
263: PetscReal *xr;
264: PetscMalloc((xin->n+1)*sizeof(PetscReal),&xr);
265: for (i=0; i<xin->n; i++) {
266: xr[i] = PetscRealPart(x->array[i]);
267: }
268: MPI_Gatherv(xr,xin->n,MPI_DOUBLE,0,0,0,MPI_DOUBLE,0,xin->comm);
269: PetscFree(xr);
270: }
271: #endif
272: }
273: PetscDrawLGDraw(lg);
274: PetscDrawSynchronizedFlush(draw);
275: return(0);
276: }
278: EXTERN_C_BEGIN
279: int VecView_MPI_Draw(Vec xin,PetscViewer viewer)
280: {
281: Vec_MPI *x = (Vec_MPI*)xin->data;
282: int i,rank,size,ierr,start,end,tag = ((PetscObject)viewer)->tag;
283: MPI_Status status;
284: PetscReal coors[4],ymin,ymax,xmin,xmax,tmp;
285: PetscDraw draw;
286: PetscTruth isnull;
287: PetscDrawAxis axis;
290: PetscViewerDrawGetDraw(viewer,0,&draw);
291: PetscDrawIsNull(draw,&isnull); if (isnull) return(0);
293: PetscDrawCheckResizedWindow(draw);
294: xmin = 1.e20; xmax = -1.e20;
295: for (i=0; i<xin->n; i++) {
296: #if defined(PETSC_USE_COMPLEX)
297: if (PetscRealPart(x->array[i]) < xmin) xmin = PetscRealPart(x->array[i]);
298: if (PetscRealPart(x->array[i]) > xmax) xmax = PetscRealPart(x->array[i]);
299: #else
300: if (x->array[i] < xmin) xmin = x->array[i];
301: if (x->array[i] > xmax) xmax = x->array[i];
302: #endif
303: }
304: if (xmin + 1.e-10 > xmax) {
305: xmin -= 1.e-5;
306: xmax += 1.e-5;
307: }
308: MPI_Reduce(&xmin,&ymin,1,MPI_DOUBLE,MPI_MIN,0,xin->comm);
309: MPI_Reduce(&xmax,&ymax,1,MPI_DOUBLE,MPI_MAX,0,xin->comm);
310: MPI_Comm_size(xin->comm,&size);
311: MPI_Comm_rank(xin->comm,&rank);
312: PetscDrawAxisCreate(draw,&axis);
313: PetscLogObjectParent(draw,axis);
314: if (!rank) {
315: PetscDrawClear(draw);
316: PetscDrawFlush(draw);
317: PetscDrawAxisSetLimits(axis,0.0,(double)xin->N,ymin,ymax);
318: PetscDrawAxisDraw(axis);
319: PetscDrawGetCoordinates(draw,coors,coors+1,coors+2,coors+3);
320: }
321: PetscDrawAxisDestroy(axis);
322: MPI_Bcast(coors,4,MPI_DOUBLE,0,xin->comm);
323: if (rank) {PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);}
324: /* draw local part of vector */
325: VecGetOwnershipRange(xin,&start,&end);
326: if (rank < size-1) { /* send last local value to the right neighbor */
327: MPI_Send(&x->array[xin->n-1],1,MPI_DOUBLE,rank+1,tag,xin->comm);
328: }
329: for (i=1; i<xin->n; i++) {
330: #if !defined(PETSC_USE_COMPLEX)
331: PetscDrawLine(draw,(PetscReal)(i-1+start),x->array[i-1],(PetscReal)(i+start),
332: x->array[i],PETSC_DRAW_RED);
333: #else
334: PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(x->array[i-1]),(PetscReal)(i+start),
335: PetscRealPart(x->array[i]),PETSC_DRAW_RED);
336: #endif
337: }
338: if (rank) { /* receive the last value of the left neighbor */
339: MPI_Recv(&tmp,1,MPI_DOUBLE,rank-1,tag,xin->comm,&status);
340: #if !defined(PETSC_USE_COMPLEX)
341: PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,x->array[0],PETSC_DRAW_RED);
342: #else
343: PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(x->array[0]),PETSC_DRAW_RED);
344: #endif
345: }
346: PetscDrawSynchronizedFlush(draw);
347: PetscDrawPause(draw);
348: return(0);
349: }
350: EXTERN_C_END
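/*
   Usage sketch for the draw viewers above (assumed caller code, not part of
   pdvec.c): both the line-graph and the plain draw paths are reached through
   VecView() on a draw viewer (the LG variant is selected by the
   PETSC_VIEWER_DRAW_LG format in VecView_MPI below).  Each process plots its
   own contiguous piece and exchanges one boundary value with its neighbor so
   the curve stays continuous across processor boundaries.

      ierr = VecView(x,PETSC_VIEWER_DRAW_WORLD);CHKERRQ(ierr);
*/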
352: int VecView_MPI_Socket(Vec xin,PetscViewer viewer)
353: {
354: Vec_MPI *x = (Vec_MPI*)xin->data;
355: int i,rank,size,N = xin->N,*lens,ierr;
356: Scalar *xx;
359: MPI_Comm_rank(xin->comm,&rank);
360: MPI_Comm_size(xin->comm,&size);
361: if (!rank) {
362: PetscMalloc((N+1)*sizeof(Scalar),&xx);
363: PetscMalloc(size*sizeof(int),&lens);
364: for (i=0; i<size; i++) {
365: lens[i] = xin->map->range[i+1] - xin->map->range[i];
366: }
367: MPI_Gatherv(x->array,xin->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,xin->comm);
368: PetscFree(lens);
369: PetscViewerSocketPutScalar(viewer,N,1,xx);
370: PetscFree(xx);
371: } else {
372: MPI_Gatherv(x->array,xin->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,xin->comm); /* send type must match the MPIU_SCALAR receive posted on rank 0 */
373: }
374: return(0);
375: }
377: int VecView_MPI(Vec xin,PetscViewer viewer)
378: {
379: int ierr;
380: PetscTruth isascii,issocket,isbinary,isdraw;
383: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
384: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);
385: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
386: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
387: if (isascii){
388: VecView_MPI_ASCII(xin,viewer);
389: } else if (issocket) {
390: VecView_MPI_Socket(xin,viewer);
391: } else if (isbinary) {
392: VecView_MPI_Binary(xin,viewer);
393: } else if (isdraw) {
394: PetscViewerFormat format;
396: PetscViewerGetFormat(viewer,&format);
397: if (format == PETSC_VIEWER_DRAW_LG) {
398: VecView_MPI_Draw_LG(xin,viewer);
399: } else {
400: VecView_MPI_Draw(xin,viewer);
401: }
402: } else {
403: SETERRQ1(1,"Viewer type %s not supported for this object",((PetscObject)viewer)->type_name);
404: }
405: return(0);
406: }
408: int VecGetSize_MPI(Vec xin,int *N)
409: {
411: *N = xin->N;
412: return(0);
413: }
415: int VecSetValues_MPI(Vec xin,int ni,const int ix[],const Scalar y[],InsertMode addv)
416: {
417: Vec_MPI *x = (Vec_MPI *)xin->data;
418: int rank = x->rank,*owners = xin->map->range,start = owners[rank];
419: int end = owners[rank+1],i,row,ierr;
420: Scalar *xx = x->array;
423: #if defined(PETSC_USE_BOPT_g)
424: if (x->insertmode == INSERT_VALUES && addv == ADD_VALUES) {
425: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
426: } else if (x->insertmode == ADD_VALUES && addv == INSERT_VALUES) {
427: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
428: }
429: #endif
430: x->insertmode = addv;
432: if (addv == INSERT_VALUES) {
433: for (i=0; i<ni; i++) {
434: if ((row = ix[i]) >= start && row < end) {
435: xx[row-start] = y[i];
436: } else if (!x->donotstash) {
437: if (ix[i] < 0) continue;
438: #if defined(PETSC_USE_BOPT_g)
439: if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %d maximum %d",ix[i],xin->N);
440: #endif
441: VecStashValue_Private(&xin->stash,row,y[i]);
442: }
443: }
444: } else {
445: for (i=0; i<ni; i++) {
446: if ((row = ix[i]) >= start && row < end) {
447: xx[row-start] += y[i];
448: } else if (!x->donotstash) {
449: if (ix[i] < 0) continue;
450: #if defined(PETSC_USE_BOPT_g)
451: if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %d maximum %d",ix[i],xin->N);
452: #endif
453: VecStashValue_Private(&xin->stash,row,y[i]);
454: }
455: }
456: }
457: return(0);
458: }
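/*
   Caller-side sketch of the stashing scheme above (assumed usage, not part of
   pdvec.c): entries whose global index falls outside the locally owned range
   [start,end) are not communicated immediately but parked in the stash, and
   only the VecAssemblyBegin/End pair moves them to their owners.  Negative
   indices are silently skipped.

      int    ix  = 0;                         any global index, owned locally or not
      Scalar val = 3.0;
      ierr = VecSetValues(x,1,&ix,&val,ADD_VALUES);CHKERRQ(ierr);
      ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
*/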
460: int VecSetValuesBlocked_MPI(Vec xin,int ni,const int ix[],const Scalar yin[],InsertMode addv)
461: {
462: Vec_MPI *x = (Vec_MPI *)xin->data;
463: int rank = x->rank,*owners = xin->map->range,start = owners[rank];
464: int end = owners[rank+1],i,row,bs = xin->bs,j,ierr;
465: Scalar *xx = x->array,*y = (Scalar*)yin;
468: #if defined(PETSC_USE_BOPT_g)
469: if (x->insertmode == INSERT_VALUES && addv == ADD_VALUES) {
470: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
471: }
472: else if (x->insertmode == ADD_VALUES && addv == INSERT_VALUES) {
473: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
474: }
475: #endif
476: x->insertmode = addv;
478: if (addv == INSERT_VALUES) {
479: for (i=0; i<ni; i++) {
480: if ((row = bs*ix[i]) >= start && row < end) {
481: for (j=0; j<bs; j++) {
482: xx[row-start+j] = y[j];
483: }
484: } else if (!x->donotstash) {
485: if (ix[i] < 0) continue;
486: #if defined(PETSC_USE_BOPT_g)
487: if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %d max %d",ix[i],xin->N);
488: #endif
489: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
490: }
491: y += bs;
492: }
493: } else {
494: for (i=0; i<ni; i++) {
495: if ((row = bs*ix[i]) >= start && row < end) {
496: for (j=0; j<bs; j++) {
497: xx[row-start+j] += y[j];
498: }
499: } else if (!x->donotstash) {
500: if (ix[i] < 0) continue;
501: #if defined(PETSC_USE_BOPT_g)
502: if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %d max %d",ix[i],xin->N);
503: #endif
504: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
505: }
506: y += bs;
507: }
508: }
509: return(0);
510: }
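/*
   Blocked counterpart sketch (assumed caller code, not part of pdvec.c; the
   VecSetBlockSize() call is the assumed way to set bs here): one block index
   plus bs consecutive values is written or stashed per entry, exactly as in
   the scalar routine above.

      int    bi     = 0;                      block index, i.e. elements bs*bi .. bs*bi+bs-1
      Scalar vals[] = {1.0,2.0};              assumes bs == 2
      ierr = VecSetBlockSize(x,2);CHKERRQ(ierr);
      ierr = VecSetValuesBlocked(x,1,&bi,vals,INSERT_VALUES);CHKERRQ(ierr);
      ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
*/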
512: /*
513: Since nsends or nreceives may be zero we add 1 in certain mallocs
514: to make sure we never malloc an empty one.
515: */
516: int VecAssemblyBegin_MPI(Vec xin)
517: {
518: Vec_MPI *x = (Vec_MPI *)xin->data;
519: int *owners = xin->map->range,*bowners,ierr,size,i,bs,nstash,reallocs;
520: InsertMode addv;
521: MPI_Comm comm = xin->comm;
524: if (x->donotstash) {
525: return(0);
526: }
528: MPI_Allreduce(&x->insertmode,&addv,1,MPI_INT,MPI_BOR,comm);
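/*
      A note on the reduction above (with the conventional InsertMode layout
      NOT_SET_VALUES=0, INSERT_VALUES=1, ADD_VALUES=2, which this check assumes):
      OR-ing the modes of all processes leaves ranks that set nothing invisible,
      while a mix of inserting and adding ranks produces exactly
      ADD_VALUES|INSERT_VALUES, which is rejected below.
*/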
529: if (addv == (ADD_VALUES|INSERT_VALUES)) {
530: SETERRQ(PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
531: }
532: x->insertmode = addv; /* in case this processor had no cache */
533:
534: bs = xin->bs;
535: MPI_Comm_size(xin->comm,&size);
536: if (!x->browners && xin->bs != -1) {
537: PetscMalloc((size+1)*sizeof(int),&bowners);
538: for (i=0; i<size+1; i++){ bowners[i] = owners[i]/bs;}
539: x->browners = bowners;
540: } else {
541: bowners = x->browners;
542: }
543: VecStashScatterBegin_Private(&xin->stash,owners);
544: VecStashScatterBegin_Private(&xin->bstash,bowners);
545: VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
546: PetscLogInfo(0,"VecAssemblyBegin_MPI:Stash has %d entries, uses %d mallocs.\n",nstash,reallocs);
547: VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
548: PetscLogInfo(0,"VecAssemblyBegin_MPI:Block-Stash has %d entries, uses %d mallocs.\n",nstash,reallocs);
550: return(0);
551: }
553: int VecAssemblyEnd_MPI(Vec vec)
554: {
555: Vec_MPI *x = (Vec_MPI *)vec->data;
556: int ierr,base,i,j,n,*row,flg,bs;
557: Scalar *val,*vv,*array;
560: if (!x->donotstash) {
561: base = vec->map->range[x->rank];
562: bs = vec->bs;
564: /* Process the stash */
565: while (1) {
566: VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
567: if (!flg) break;
568: if (x->insertmode == ADD_VALUES) {
569: for (i=0; i<n; i++) { x->array[row[i] - base] += val[i]; }
570: } else if (x->insertmode == INSERT_VALUES) {
571: for (i=0; i<n; i++) { x->array[row[i] - base] = val[i]; }
572: } else {
573: SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
574: }
575: }
576: VecStashScatterEnd_Private(&vec->stash);
578: /* now process the block-stash */
579: while (1) {
580: VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
581: if (!flg) break;
582: for (i=0; i<n; i++) {
583: array = x->array+row[i]*bs-base;
584: vv = val+i*bs;
585: if (x->insertmode == ADD_VALUES) {
586: for (j=0; j<bs; j++) { array[j] += vv[j];}
587: } else if (x->insertmode == INSERT_VALUES) {
588: for (j=0; j<bs; j++) { array[j] = vv[j]; }
589: } else {
590: SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
591: }
592: }
593: }
594: VecStashScatterEnd_Private(&vec->bstash);
595: }
596: x->insertmode = NOT_SET_VALUES;
597: return(0);
598: }