Actual source code: vscat.c
1: /*
2: Code for creating scatters between vectors. This file
3: includes the code for scattering between sequential vectors and
4: some special cases for parallel scatters.
5: */
7: #include "src/vec/is/isimpl.h"
8: #include "vecimpl.h"
10: /* Logging support */
11: PetscCookie VEC_SCATTER_COOKIE = 0;
13: #if defined(PETSC_USE_BOPT_g)
14: /*
15:      Checks if any indices are less than zero or greater than or equal to nmax and generates an error
16: */
19: static PetscErrorCode VecScatterCheckIndices_Private(PetscInt nmax,PetscInt n,PetscInt *idx)
20: {
21: PetscInt i;
24: for (i=0; i<n; i++) {
25: if (idx[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Negative index %D at %D location",idx[i],i);
26: if (idx[i] >= nmax) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,"Index %D at %D location greater than max %D",idx[i],i,nmax);
27: }
28: return(0);
29: }
30: #endif
32: /*
33: This is special scatter code for when the entire parallel vector is
34: copied to each processor.
36:      This code was written by Cameron Cooper, Occidental College, Fall 1995,
37:      while working at ANL as a SERS student.
38: */
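/*
   An illustrative sketch (not part of the original source) of one way this code
   path is selected: scattering an entire MPI vector x of global size N into a
   sequential vector y of length N on every process, using whole-vector stride
   index sets (see the special-case tests in VecScatterCreate() below). Here
   comm, x, y, and N are assumed to exist already.

      IS         ix,iy;
      VecScatter scatter;
      ISCreateStride(comm,N,0,1,&ix);
      ISCreateStride(PETSC_COMM_SELF,N,0,1,&iy);
      VecScatterCreate(x,ix,y,iy,&scatter);   <-- dispatches to VecScatterBegin_MPI_ToAll()
*/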
41: PetscErrorCode VecScatterBegin_MPI_ToAll(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
42: {
43: #if defined(PETSC_USE_64BIT_INT)
45: SETERRQ(PETSC_ERR_SUP,"Not implemented due to limited MPI support for extremely long gathers");
46: #else
48: PetscInt yy_n,xx_n,*range;
49: PetscScalar *xv,*yv;
50: PetscMap map;
53: VecGetLocalSize(y,&yy_n);
54: VecGetLocalSize(x,&xx_n);
55: VecGetArray(y,&yv);
56: if (x != y) {VecGetArray(x,&xv);}
58: if (mode & SCATTER_REVERSE) {
59: PetscScalar *xvt,*xvt2;
60: VecScatter_MPI_ToAll *scat = (VecScatter_MPI_ToAll*)ctx->todata;
61: PetscInt i;
63: if (addv == INSERT_VALUES) {
64: PetscInt rstart,rend;
65: /*
66: copy the correct part of the local vector into the local storage of
67: the MPI one. Note: this operation only makes sense if all the local
68: vectors have the same values
69: */
70: VecGetOwnershipRange(y,&rstart,&rend);
71: PetscMemcpy(yv,xv+rstart,yy_n*sizeof(PetscScalar));
72: } else {
73: MPI_Comm comm;
74: PetscMPIInt rank;
75: PetscObjectGetComm((PetscObject)y,&comm);
76: MPI_Comm_rank(comm,&rank);
77: if (scat->work1) xvt = scat->work1;
78: else {
79: PetscMalloc(xx_n*sizeof(PetscScalar),&xvt);
80: scat->work1 = xvt;
81: }
82: if (!rank) { /* I am the zeroth processor, values are accumulated here */
83: if (scat->work2) xvt2 = scat->work2;
84: else {
85: PetscMalloc(xx_n*sizeof(PetscScalar),&xvt2);
86: scat->work2 = xvt2;
87: }
88: VecGetPetscMap(y,&map);
89: PetscMapGetGlobalRange(map,&range);
90: MPI_Gatherv(yv,yy_n,MPIU_SCALAR,xvt2,scat->count,range,MPIU_SCALAR,0,ctx->comm);
91: #if defined(PETSC_USE_COMPLEX)
92: MPI_Reduce(xv,xvt,2*xx_n,MPIU_REAL,MPI_SUM,0,ctx->comm);
93: #else
94: MPI_Reduce(xv,xvt,xx_n,MPIU_SCALAR,MPI_SUM,0,ctx->comm);
95: #endif
96: if (addv == ADD_VALUES) {
97: for (i=0; i<xx_n; i++) {
98: xvt[i] += xvt2[i];
99: }
100: #if !defined(PETSC_USE_COMPLEX)
101: } else if (addv == MAX_VALUES) {
102: for (i=0; i<xx_n; i++) {
103: xvt[i] = PetscMax(xvt[i],xvt2[i]);
104: }
105: #endif
106: } else {SETERRQ(PETSC_ERR_ARG_UNKNOWN_TYPE,"Wrong insert option");}
107: MPI_Scatterv(xvt,scat->count,map->range,MPIU_SCALAR,yv,yy_n,MPIU_SCALAR,0,ctx->comm);
108: } else {
109: VecGetPetscMap(y,&map);
110: PetscMapGetGlobalRange(map,&range);
111: MPI_Gatherv(yv,yy_n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,ctx->comm);
112: #if defined(PETSC_USE_COMPLEX)
113: MPI_Reduce(xv,xvt,2*xx_n,MPIU_REAL,MPI_SUM,0,ctx->comm);
114: #else
115: MPI_Reduce(xv,xvt,xx_n,MPIU_SCALAR,MPI_SUM,0,ctx->comm);
116: #endif
117: MPI_Scatterv(0,scat->count,range,MPIU_SCALAR,yv,yy_n,MPIU_SCALAR,0,ctx->comm);
118: }
119: }
120: } else {
121: PetscScalar *yvt;
122: VecScatter_MPI_ToAll *scat = (VecScatter_MPI_ToAll*)ctx->todata;
123: PetscInt i;
125: VecGetPetscMap(x,&map);
126: PetscMapGetGlobalRange(map,&range);
127: if (addv == INSERT_VALUES) {
128: MPI_Allgatherv(xv,xx_n,MPIU_SCALAR,yv,scat->count,range,MPIU_SCALAR,ctx->comm);
129: } else {
130: if (scat->work1) yvt = scat->work1;
131: else {
132: PetscMalloc(yy_n*sizeof(PetscScalar),&yvt);
133: scat->work1 = yvt;
134: }
135: MPI_Allgatherv(xv,xx_n,MPIU_SCALAR,yvt,scat->count,map->range,MPIU_SCALAR,ctx->comm);
136: if (addv == ADD_VALUES){
137: for (i=0; i<yy_n; i++) {
138: yv[i] += yvt[i];
139: }
140: #if !defined(PETSC_USE_COMPLEX)
141: } else if (addv == MAX_VALUES) {
142: for (i=0; i<yy_n; i++) {
143: yv[i] = PetscMax(yv[i],yvt[i]);
144: }
145: #endif
146: } else {SETERRQ(PETSC_ERR_ARG_UNKNOWN_TYPE,"Wrong insert option");}
147: }
148: }
149: VecRestoreArray(y,&yv);
150: if (x != y) {VecRestoreArray(x,&xv);}
151: #endif
152: return(0);
153: }
155: /*
156: This is special scatter code for when the entire parallel vector is
157: copied to processor 0.
159: */
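/*
   An illustrative sketch (not part of the original source) of how this path is
   selected: gathering an entire MPI vector x of global size N into a sequential
   vector y that has length N on process 0 and length 0 elsewhere, again using
   stride index sets. Here comm, rank, x, y, and N are assumed to exist already.

      IS         ix,iy;
      VecScatter scatter;
      PetscInt   n = rank ? 0 : N;
      ISCreateStride(comm,n,0,1,&ix);
      ISCreateStride(PETSC_COMM_SELF,n,0,1,&iy);
      VecScatterCreate(x,ix,y,iy,&scatter);   <-- dispatches to VecScatterBegin_MPI_ToOne()
*/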
162: PetscErrorCode VecScatterBegin_MPI_ToOne(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
163: {
164: #if defined(PETSC_USE_64BIT_INT)
166: SETERRQ(PETSC_ERR_SUP,"Not implemented due to limited MPI support for extremely long gathers");
167: #else
169: PetscMPIInt rank;
170: PetscInt yy_n,xx_n,*range;
171: PetscScalar *xv,*yv;
172: MPI_Comm comm;
173: PetscMap map;
176: VecGetLocalSize(y,&yy_n);
177: VecGetLocalSize(x,&xx_n);
178: VecGetArray(x,&xv);
179: VecGetArray(y,&yv);
181: PetscObjectGetComm((PetscObject)x,&comm);
182: MPI_Comm_rank(comm,&rank);
184: /* -------- Reverse scatter; spread from processor 0 to other processors */
185: if (mode & SCATTER_REVERSE) {
186: PetscScalar *yvt;
187: VecScatter_MPI_ToAll *scat = (VecScatter_MPI_ToAll*)ctx->todata;
188: PetscInt i;
190: VecGetPetscMap(y,&map);
191: PetscMapGetGlobalRange(map,&range);
192: if (addv == INSERT_VALUES) {
193: MPI_Scatterv(xv,scat->count,range,MPIU_SCALAR,yv,yy_n,MPIU_SCALAR,0,ctx->comm);
194: } else {
195: if (scat->work2) yvt = scat->work2;
196: else {
197: PetscMalloc(xx_n*sizeof(PetscScalar),&yvt);
198: scat->work2 = yvt;
199: }
200: MPI_Scatterv(xv,scat->count,range,MPIU_SCALAR,yvt,yy_n,MPIU_SCALAR,0,ctx->comm);
201: if (addv == ADD_VALUES) {
202: for (i=0; i<yy_n; i++) {
203: yv[i] += yvt[i];
204: }
205: #if !defined(PETSC_USE_COMPLEX)
206: } else if (addv == MAX_VALUES) {
207: for (i=0; i<yy_n; i++) {
208: yv[i] = PetscMax(yv[i],yvt[i]);
209: }
210: #endif
211: } else {SETERRQ(PETSC_ERR_ARG_UNKNOWN_TYPE,"Wrong insert option");}
212: }
213: /* --------- Forward scatter; gather all values onto processor 0 */
214: } else {
215: PetscScalar *yvt = 0;
216: VecScatter_MPI_ToAll *scat = (VecScatter_MPI_ToAll*)ctx->todata;
217: PetscInt i;
219: VecGetPetscMap(x,&map);
220: PetscMapGetGlobalRange(map,&range);
221: if (addv == INSERT_VALUES) {
222: MPI_Gatherv(xv,xx_n,MPIU_SCALAR,yv,scat->count,range,MPIU_SCALAR,0,ctx->comm);
223: } else {
224: if (!rank) {
225: if (scat->work1) yvt = scat->work1;
226: else {
227: PetscMalloc(yy_n*sizeof(PetscScalar),&yvt);
228: scat->work1 = yvt;
229: }
230: }
231: MPI_Gatherv(xv,xx_n,MPIU_SCALAR,yvt,scat->count,range,MPIU_SCALAR,0,ctx->comm);
232: if (!rank) {
233: if (addv == ADD_VALUES) {
234: for (i=0; i<yy_n; i++) {
235: yv[i] += yvt[i];
236: }
237: #if !defined(PETSC_USE_COMPLEX)
238: } else if (addv == MAX_VALUES) {
239: for (i=0; i<yy_n; i++) {
240: yv[i] = PetscMax(yv[i],yvt[i]);
241: }
242: #endif
243: } else {SETERRQ(PETSC_ERR_ARG_UNKNOWN_TYPE,"Wrong insert option");}
244: }
245: }
246: }
247: VecRestoreArray(x,&xv);
248: VecRestoreArray(y,&yv);
249: #endif
250: return(0);
251: }
253: /*
254:     The following routines are used by both VecScatterBegin_MPI_ToAll() and VecScatterBegin_MPI_ToOne()
255: */
258: PetscErrorCode VecScatterDestroy_MPI_ToAll(VecScatter ctx)
259: {
260: VecScatter_MPI_ToAll *scat = (VecScatter_MPI_ToAll*)ctx->todata;
261: PetscErrorCode ierr;
264: if (scat->work1) {PetscFree(scat->work1);}
265: if (scat->work2) {PetscFree(scat->work2);}
266: PetscFree2(ctx->todata,scat->count);
267: PetscHeaderDestroy(ctx);
268: return(0);
269: }
273: PetscErrorCode VecScatterDestroy_SGtoSG(VecScatter ctx)
274: {
275: PetscErrorCode ierr;
278: PetscFree4(ctx->todata,((VecScatter_Seq_General*)ctx->todata)->slots,ctx->fromdata,((VecScatter_Seq_General*)ctx->fromdata)->slots);
279: PetscHeaderDestroy(ctx);
280: return(0);
281: }
285: PetscErrorCode VecScatterDestroy_SGtoSS(VecScatter ctx)
286: {
287: PetscErrorCode ierr;
290: PetscFree3(ctx->todata,ctx->fromdata,((VecScatter_Seq_General*)ctx->fromdata)->slots);
291: PetscHeaderDestroy(ctx);
292: return(0);
293: }
297: PetscErrorCode VecScatterDestroy_SStoSG(VecScatter ctx)
298: {
299: PetscErrorCode ierr;
302: PetscFree3(ctx->todata,((VecScatter_Seq_General*)ctx->todata)->slots,ctx->fromdata);
303: PetscHeaderDestroy(ctx);
304: return(0);
305: }
309: PetscErrorCode VecScatterDestroy_SStoSS(VecScatter ctx)
310: {
314: PetscFree2(ctx->todata,ctx->fromdata);
315: PetscHeaderDestroy(ctx);
316: return(0);
317: }
319: /* -------------------------------------------------------------------------*/
322: PetscErrorCode VecScatterCopy_MPI_ToAll(VecScatter in,VecScatter out)
323: {
324: VecScatter_MPI_ToAll *in_to = (VecScatter_MPI_ToAll*)in->todata,*sto;
325: PetscErrorCode ierr;
326: PetscMPIInt size;
327: PetscInt i;
330: out->postrecvs = 0;
331: out->begin = in->begin;
332: out->end = in->end;
333: out->copy = in->copy;
334: out->destroy = in->destroy;
335: out->view = in->view;
337: MPI_Comm_size(out->comm,&size);
338: PetscMalloc2(1,VecScatter_MPI_ToAll,&sto,size,PetscMPIInt,&sto->count);
339: sto->type = in_to->type;
340: for (i=0; i<size; i++) {
341: sto->count[i] = in_to->count[i];
342: }
343: sto->work1 = 0;
344: sto->work2 = 0;
345: out->todata = (void*)sto;
346: out->fromdata = (void*)0;
347: return(0);
348: }
350: /* --------------------------------------------------------------------------------------*/
351: /*
352: Scatter: sequential general to sequential general
353: */
356: PetscErrorCode VecScatterBegin_SGtoSG(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
357: {
358: VecScatter_Seq_General *gen_to = (VecScatter_Seq_General*)ctx->todata;
359: VecScatter_Seq_General *gen_from = (VecScatter_Seq_General*)ctx->fromdata;
360: PetscErrorCode ierr;
361: PetscInt i,n = gen_from->n,*fslots,*tslots;
362: PetscScalar *xv,*yv;
363:
365: VecGetArray(x,&xv);
366: if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}
367: if (mode & SCATTER_REVERSE){
368: gen_to = (VecScatter_Seq_General*)ctx->fromdata;
369: gen_from = (VecScatter_Seq_General*)ctx->todata;
370: }
371: fslots = gen_from->slots;
372: tslots = gen_to->slots;
374: if (addv == INSERT_VALUES) {
375: for (i=0; i<n; i++) {yv[tslots[i]] = xv[fslots[i]];}
376: } else if (addv == ADD_VALUES) {
377: for (i=0; i<n; i++) {yv[tslots[i]] += xv[fslots[i]];}
378: #if !defined(PETSC_USE_COMPLEX)
379: } else if (addv == MAX_VALUES) {
380: for (i=0; i<n; i++) {yv[tslots[i]] = PetscMax(yv[tslots[i]],xv[fslots[i]]);}
381: #endif
382: } else {SETERRQ(PETSC_ERR_ARG_UNKNOWN_TYPE,"Wrong insert option");}
383: VecRestoreArray(x,&xv);
384: if (x != y) {VecRestoreArray(y,&yv);}
385: return(0);
386: }
388: /*
389: Scatter: sequential general to sequential stride 1
390: */
393: PetscErrorCode VecScatterBegin_SGtoSS_Stride1(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
394: {
395: VecScatter_Seq_Stride *gen_to = (VecScatter_Seq_Stride*)ctx->todata;
396: VecScatter_Seq_General *gen_from = (VecScatter_Seq_General*)ctx->fromdata;
397: PetscInt i,n = gen_from->n,*fslots = gen_from->slots;
398: PetscErrorCode ierr;
399: PetscInt first = gen_to->first;
400: PetscScalar *xv,*yv;
401:
403: VecGetArray(x,&xv);
404: if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}
405: if (mode & SCATTER_REVERSE){
406: xv += first;
407: if (addv == INSERT_VALUES) {
408: for (i=0; i<n; i++) {yv[fslots[i]] = xv[i];}
409: } else if (addv == ADD_VALUES) {
410: for (i=0; i<n; i++) {yv[fslots[i]] += xv[i];}
411: #if !defined(PETSC_USE_COMPLEX)
412: } else if (addv == MAX_VALUES) {
413: for (i=0; i<n; i++) {yv[fslots[i]] = PetscMax(yv[fslots[i]],xv[i]);}
414: #endif
415: } else {SETERRQ(PETSC_ERR_ARG_UNKNOWN_TYPE,"Wrong insert option");}
416: } else {
417: yv += first;
418: if (addv == INSERT_VALUES) {
419: for (i=0; i<n; i++) {yv[i] = xv[fslots[i]];}
420: } else if (addv == ADD_VALUES) {
421: for (i=0; i<n; i++) {yv[i] += xv[fslots[i]];}
422: #if !defined(PETSC_USE_COMPLEX)
423: } else if (addv == MAX_VALUES) {
424: for (i=0; i<n; i++) {yv[i] = PetscMax(yv[i],xv[fslots[i]]);}
425: #endif
426: } else {SETERRQ(PETSC_ERR_ARG_UNKNOWN_TYPE,"Wrong insert option");}
427: }
428: VecRestoreArray(x,&xv);
429: if (x != y) {VecRestoreArray(y,&yv);}
430: return(0);
431: }
433: /*
434: Scatter: sequential general to sequential stride
435: */
438: PetscErrorCode VecScatterBegin_SGtoSS(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
439: {
440: VecScatter_Seq_Stride *gen_to = (VecScatter_Seq_Stride*)ctx->todata;
441: VecScatter_Seq_General *gen_from = (VecScatter_Seq_General*)ctx->fromdata;
442: PetscInt i,n = gen_from->n,*fslots = gen_from->slots;
443: PetscErrorCode ierr;
444: PetscInt first = gen_to->first,step = gen_to->step;
445: PetscScalar *xv,*yv;
446:
448: VecGetArray(x,&xv);
449: if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}
451: if (mode & SCATTER_REVERSE){
452: if (addv == INSERT_VALUES) {
453: for (i=0; i<n; i++) {yv[fslots[i]] = xv[first + i*step];}
454: } else if (addv == ADD_VALUES) {
455: for (i=0; i<n; i++) {yv[fslots[i]] += xv[first + i*step];}
456: #if !defined(PETSC_USE_COMPLEX)
457: } else if (addv == MAX_VALUES) {
458: for (i=0; i<n; i++) {yv[fslots[i]] = PetscMax(yv[fslots[i]],xv[first + i*step]);}
459: #endif
460: } else {SETERRQ(PETSC_ERR_ARG_UNKNOWN_TYPE,"Wrong insert option");}
461: } else {
462: if (addv == INSERT_VALUES) {
463: for (i=0; i<n; i++) {yv[first + i*step] = xv[fslots[i]];}
464: } else if (addv == ADD_VALUES) {
465: for (i=0; i<n; i++) {yv[first + i*step] += xv[fslots[i]];}
466: #if !defined(PETSC_USE_COMPLEX)
467: } else if (addv == MAX_VALUES) {
468: for (i=0; i<n; i++) {yv[first + i*step] = PetscMax(yv[first + i*step],xv[fslots[i]]);}
469: #endif
470: } else {SETERRQ(PETSC_ERR_ARG_UNKNOWN_TYPE,"Wrong insert option");}
471: }
472: VecRestoreArray(x,&xv);
473: if (x != y) {VecRestoreArray(y,&yv);}
474: return(0);
475: }
477: /*
478: Scatter: sequential stride 1 to sequential general
479: */
482: PetscErrorCode VecScatterBegin_SStoSG_Stride1(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
483: {
484: VecScatter_Seq_Stride *gen_from = (VecScatter_Seq_Stride*)ctx->fromdata;
485: VecScatter_Seq_General *gen_to = (VecScatter_Seq_General*)ctx->todata;
486: PetscInt i,n = gen_from->n,*fslots = gen_to->slots;
487: PetscErrorCode ierr;
488: PetscInt first = gen_from->first;
489: PetscScalar *xv,*yv;
490:
492: VecGetArray(x,&xv);
493: if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}
495: if (mode & SCATTER_REVERSE){
496: yv += first;
497: if (addv == INSERT_VALUES) {
498: for (i=0; i<n; i++) {yv[i] = xv[fslots[i]];}
499: } else if (addv == ADD_VALUES) {
500: for (i=0; i<n; i++) {yv[i] += xv[fslots[i]];}
501: #if !defined(PETSC_USE_COMPLEX)
502: } else if (addv == MAX_VALUES) {
503: for (i=0; i<n; i++) {yv[i] = PetscMax(yv[i],xv[fslots[i]]);}
504: #endif
505: } else {SETERRQ(PETSC_ERR_ARG_UNKNOWN_TYPE,"Wrong insert option");}
506: } else {
507: xv += first;
508: if (addv == INSERT_VALUES) {
509: for (i=0; i<n; i++) {yv[fslots[i]] = xv[i];}
510: } else if (addv == ADD_VALUES) {
511: for (i=0; i<n; i++) {yv[fslots[i]] += xv[i];}
512: #if !defined(PETSC_USE_COMPLEX)
513: } else if (addv == MAX_VALUES) {
514: for (i=0; i<n; i++) {yv[fslots[i]] = PetscMax(yv[fslots[i]],xv[i]);}
515: #endif
516: } else {SETERRQ(PETSC_ERR_ARG_UNKNOWN_TYPE,"Wrong insert option");}
517: }
518: VecRestoreArray(x,&xv);
519: if (x != y) {VecRestoreArray(y,&yv);}
520: return(0);
521: }
525: /*
526: Scatter: sequential stride to sequential general
527: */
528: PetscErrorCode VecScatterBegin_SStoSG(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
529: {
530: VecScatter_Seq_Stride *gen_from = (VecScatter_Seq_Stride*)ctx->fromdata;
531: VecScatter_Seq_General *gen_to = (VecScatter_Seq_General*)ctx->todata;
532: PetscInt i,n = gen_from->n,*fslots = gen_to->slots;
533: PetscErrorCode ierr;
534: PetscInt first = gen_from->first,step = gen_from->step;
535: PetscScalar *xv,*yv;
536:
538: VecGetArray(x,&xv);
539: if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}
541: if (mode & SCATTER_REVERSE){
542: if (addv == INSERT_VALUES) {
543: for (i=0; i<n; i++) {yv[first + i*step] = xv[fslots[i]];}
544: } else if (addv == ADD_VALUES) {
545: for (i=0; i<n; i++) {yv[first + i*step] += xv[fslots[i]];}
546: #if !defined(PETSC_USE_COMPLEX)
547: } else if (addv == MAX_VALUES) {
548: for (i=0; i<n; i++) {yv[first + i*step] = PetscMax(yv[first + i*step],xv[fslots[i]]);}
549: #endif
550: } else {SETERRQ(PETSC_ERR_ARG_UNKNOWN_TYPE,"Wrong insert option");}
551: } else {
552: if (addv == INSERT_VALUES) {
553: for (i=0; i<n; i++) {yv[fslots[i]] = xv[first + i*step];}
554: } else if (addv == ADD_VALUES) {
555: for (i=0; i<n; i++) {yv[fslots[i]] += xv[first + i*step];}
556: #if !defined(PETSC_USE_COMPLEX)
557: } else if (addv == MAX_VALUES) {
558: for (i=0; i<n; i++) {yv[fslots[i]] = PetscMax(yv[fslots[i]],xv[first + i*step]);}
559: #endif
560: } else {SETERRQ(PETSC_ERR_ARG_UNKNOWN_TYPE,"Wrong insert option");}
561: }
562: VecRestoreArray(x,&xv);
563: if (x != y) {VecRestoreArray(y,&yv);}
564: return(0);
565: }
567: /*
568: Scatter: sequential stride to sequential stride
569: */
572: PetscErrorCode VecScatterBegin_SStoSS(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
573: {
574: VecScatter_Seq_Stride *gen_to = (VecScatter_Seq_Stride*)ctx->todata;
575: VecScatter_Seq_Stride *gen_from = (VecScatter_Seq_Stride*)ctx->fromdata;
576: PetscInt i,n = gen_from->n,to_first = gen_to->first,to_step = gen_to->step;
577: PetscErrorCode ierr;
578: PetscInt from_first = gen_from->first,from_step = gen_from->step;
579: PetscScalar *xv,*yv;
580:
582: VecGetArray(x,&xv);
583: if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}
585: if (mode & SCATTER_REVERSE){
586: from_first = gen_to->first;
587: to_first = gen_from->first;
588: from_step = gen_to->step;
589: to_step = gen_from->step;
590: }
592: if (addv == INSERT_VALUES) {
593: if (to_step == 1 && from_step == 1) {
594: PetscMemcpy(yv+to_first,xv+from_first,n*sizeof(PetscScalar));
595: } else {
596: for (i=0; i<n; i++) {
597: yv[to_first + i*to_step] = xv[from_first+i*from_step];
598: }
599: }
600: } else if (addv == ADD_VALUES) {
601: if (to_step == 1 && from_step == 1) {
602: yv += to_first; xv += from_first;
603: for (i=0; i<n; i++) {
604: yv[i] += xv[i];
605: }
606: } else {
607: for (i=0; i<n; i++) {
608: yv[to_first + i*to_step] += xv[from_first+i*from_step];
609: }
610: }
611: #if !defined(PETSC_USE_COMPLEX)
612: } else if (addv == MAX_VALUES) {
613: if (to_step == 1 && from_step == 1) {
614: yv += to_first; xv += from_first;
615: for (i=0; i<n; i++) {
616: yv[i] = PetscMax(yv[i],xv[i]);
617: }
618: } else {
619: for (i=0; i<n; i++) {
620: yv[to_first + i*to_step] = PetscMax(yv[to_first + i*to_step],xv[from_first+i*from_step]);
621: }
622: }
623: #endif
624: } else {SETERRQ(PETSC_ERR_ARG_UNKNOWN_TYPE,"Wrong insert option");}
625: VecRestoreArray(x,&xv);
626: if (x != y) {VecRestoreArray(y,&yv);}
627: return(0);
628: }
630: /* --------------------------------------------------------------------------*/
635: PetscErrorCode VecScatterCopy_SGToSG(VecScatter in,VecScatter out)
636: {
637: PetscErrorCode ierr;
638: VecScatter_Seq_General *in_to = (VecScatter_Seq_General*)in->todata,*out_to;
639: VecScatter_Seq_General *in_from = (VecScatter_Seq_General*)in->fromdata,*out_from;
640:
642: out->postrecvs = 0;
643: out->begin = in->begin;
644: out->end = in->end;
645: out->copy = in->copy;
646: out->destroy = in->destroy;
647: out->view = in->view;
649: PetscMalloc4(1,VecScatter_Seq_General,&out_to,in_to->n,PetscInt,&out_to->slots,1,VecScatter_Seq_General,&out_from,in_from->n,PetscInt,&out_from->slots);
650: out_to->n = in_to->n;
651: out_to->type = in_to->type;
652: out_to->nonmatching_computed = PETSC_FALSE;
653: out_to->slots_nonmatching = 0;
654: out_to->is_copy = PETSC_FALSE;
655: PetscMemcpy(out_to->slots,in_to->slots,(out_to->n)*sizeof(PetscInt));
657: out_from->n = in_from->n;
658: out_from->type = in_from->type;
659: out_from->nonmatching_computed = PETSC_FALSE;
660: out_from->slots_nonmatching = 0;
661: out_from->is_copy = PETSC_FALSE;
662: PetscMemcpy(out_from->slots,in_from->slots,(out_from->n)*sizeof(PetscInt));
664: out->todata = (void*)out_to;
665: out->fromdata = (void*)out_from;
666: return(0);
667: }
672: PetscErrorCode VecScatterCopy_SGToSS(VecScatter in,VecScatter out)
673: {
674: PetscErrorCode ierr;
675: VecScatter_Seq_Stride *in_to = (VecScatter_Seq_Stride*)in->todata,*out_to;
676: VecScatter_Seq_General *in_from = (VecScatter_Seq_General*)in->fromdata,*out_from;
677:
679: out->postrecvs = 0;
680: out->begin = in->begin;
681: out->end = in->end;
682: out->copy = in->copy;
683: out->destroy = in->destroy;
684: out->view = in->view;
686: PetscMalloc3(1,VecScatter_Seq_Stride,&out_to,1,VecScatter_Seq_General,&out_from,in_from->n,PetscInt,&out_from->slots);
687: out_to->n = in_to->n;
688: out_to->type = in_to->type;
689: out_to->first = in_to->first;
690: out_to->step = in_to->step;
691: out_to->type = in_to->type;
693: out_from->n = in_from->n;
694: out_from->type = in_from->type;
695: out_from->nonmatching_computed = PETSC_FALSE;
696: out_from->slots_nonmatching = 0;
697: out_from->is_copy = PETSC_FALSE;
698: PetscMemcpy(out_from->slots,in_from->slots,(out_from->n)*sizeof(PetscInt));
700: out->todata = (void*)out_to;
701: out->fromdata = (void*)out_from;
702: return(0);
703: }
705: /* --------------------------------------------------------------------------*/
706: /*
707: Scatter: parallel to sequential vector, sequential strides for both.
708: */
711: PetscErrorCode VecScatterCopy_PStoSS(VecScatter in,VecScatter out)
712: {
713: VecScatter_Seq_Stride *in_to = (VecScatter_Seq_Stride*)in->todata,*out_to;
714: VecScatter_Seq_Stride *in_from = (VecScatter_Seq_Stride*)in->fromdata,*out_from;
715: PetscErrorCode ierr;
718: out->postrecvs = 0;
719: out->begin = in->begin;
720: out->end = in->end;
721: out->copy = in->copy;
722: out->destroy = in->destroy;
723: out->view = in->view;
725: PetscMalloc2(1,VecScatter_Seq_Stride,&out_to,1,VecScatter_Seq_Stride,&out_from);
726: out_to->n = in_to->n;
727: out_to->type = in_to->type;
728: out_to->first = in_to->first;
729: out_to->step = in_to->step;
730: out_to->type = in_to->type;
731: out_from->n = in_from->n;
732: out_from->type = in_from->type;
733: out_from->first = in_from->first;
734: out_from->step = in_from->step;
735: out_from->type = in_from->type;
736: out->todata = (void*)out_to;
737: out->fromdata = (void*)out_from;
738: return(0);
739: }
741: EXTERN PetscErrorCode VecScatterCreate_PtoS(PetscInt,PetscInt *,PetscInt,PetscInt *,Vec,Vec,PetscInt,VecScatter);
742: EXTERN PetscErrorCode VecScatterCreate_PtoP(PetscInt,PetscInt *,PetscInt,PetscInt *,Vec,Vec,VecScatter);
743: EXTERN PetscErrorCode VecScatterCreate_StoP(PetscInt,PetscInt *,PetscInt,PetscInt *,Vec,VecScatter);
745: /* =======================================================================*/
746: #define VEC_SEQ_ID 0
747: #define VEC_MPI_ID 1
751: /*@C
752: VecScatterCreate - Creates a vector scatter context.
754: Collective on Vec
756: Input Parameters:
757: + xin - a vector that defines the shape (parallel data layout of the vector)
758: of vectors from which we scatter
759: . yin - a vector that defines the shape (parallel data layout of the vector)
760: of vectors to which we scatter
761: . ix - the indices of xin to scatter (if PETSC_NULL scatters all values)
762: - iy - the indices of yin to hold results (if PETSC_NULL fills entire vector yin)
764: Output Parameter:
765: . newctx - location to store the new scatter context
767: Options Database Keys:
768: + -vecscatter_merge - Merges scatter send and receive (may offer better performance with some MPIs)
769: . -vecscatter_ssend - Uses MPI_Ssend_init() instead of MPI_Send_init() (may offer better performance with some MPIs)
770: . -vecscatter_sendfirst - Posts sends before receives (may offer better performance with some MPIs)
771: . -vecscatter_rr - Uses ready-receiver mode for MPI sends in scatters (rarely used)
772: - -vecscatter_packtogether - Pack all messages before sending, receive all messages before unpacking
774: Level: intermediate
776: Notes:
777: In calls to VecScatterBegin() and VecScatterEnd() you can use different vectors than the xin and
778: yin you used above; BUT they must have the same parallel data layout, for example,
779: they could be obtained from VecDuplicate().
780: A VecScatter context CANNOT be used in two or more simultaneous scatters;
781: that is, you cannot call a second VecScatterBegin() with the same scatter
782: context until VecScatterEnd() has been called on the first VecScatterBegin().
783: In this case a separate VecScatter is needed for each concurrent scatter.
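   Example usage (an illustrative sketch, not from the original manual page;
   assumes the vectors xin and yin and index sets ix and iy already exist):
.vb
      VecScatter scatter;
      VecScatterCreate(xin,ix,yin,iy,&scatter);
      VecScatterBegin(xin,yin,INSERT_VALUES,SCATTER_FORWARD,scatter);
      VecScatterEnd(xin,yin,INSERT_VALUES,SCATTER_FORWARD,scatter);
      VecScatterDestroy(scatter);
.ve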
785: Concepts: scatter^between vectors
786: Concepts: gather^between vectors
788: .seealso: VecScatterDestroy(), VecScatterCreateToAll(), VecScatterCreateToZero()
789: @*/
790: PetscErrorCode VecScatterCreate(Vec xin,IS ix,Vec yin,IS iy,VecScatter *newctx)
791: {
792: VecScatter ctx;
794: PetscMPIInt size;
795: PetscInt totalv,*range,xin_type = VEC_SEQ_ID,yin_type = VEC_SEQ_ID;
796: MPI_Comm comm,ycomm;
797: PetscTruth ixblock,iyblock,iystride,islocal,cando,flag;
798: IS tix = 0,tiy = 0;
802: /*
803:       Determine if the vectors are "parallel", i.e., they share a comm with other processors, or
804:    sequential (they do not share a comm). The difference is that parallel vectors treat the
805:    index set as providing indices in the global parallel numbering of the vector, while
806:    sequential vectors treat the index set as providing indices in the local sequential
807:    numbering.
808: */
809: PetscObjectGetComm((PetscObject)xin,&comm);
810: MPI_Comm_size(comm,&size);
811: if (size > 1) {xin_type = VEC_MPI_ID;}
813: PetscObjectGetComm((PetscObject)yin,&ycomm);
814: MPI_Comm_size(ycomm,&size);
815: if (size > 1) {comm = ycomm; yin_type = VEC_MPI_ID;}
816:
817: /* generate the Scatter context */
818: PetscHeaderCreate(ctx,_p_VecScatter,int,VEC_SCATTER_COOKIE,0,"VecScatter",comm,VecScatterDestroy,VecScatterView);
819: ctx->inuse = PETSC_FALSE;
821: ctx->beginandendtogether = PETSC_FALSE;
822: PetscOptionsHasName(PETSC_NULL,"-vecscatter_merge",&ctx->beginandendtogether);
823: if (ctx->beginandendtogether) {
824: PetscLogInfo(ctx,"VecScatterCreate:Using combined (merged) vector scatter begin and end\n");
825: }
826: PetscOptionsHasName(PETSC_NULL,"-vecscatter_packtogether",&ctx->packtogether);
827: if (ctx->packtogether) {
828: PetscLogInfo(ctx,"VecScatterCreate:Pack all messages before sending\n");
829: }
831: VecGetLocalSize(xin,&ctx->from_n);
832: VecGetLocalSize(yin,&ctx->to_n);
834: /*
835:       If ix or iy is not provided, assume the entire vector is being scattered
836: */
837: if (!ix && xin_type == VEC_SEQ_ID) {
838: ISCreateStride(comm,ctx->from_n,0,1,&ix);
839: tix = ix;
840: } else if (!ix && xin_type == VEC_MPI_ID) {
841: if (yin_type == VEC_MPI_ID) {
842: PetscInt ntmp, low;
843: VecGetLocalSize(xin,&ntmp);
844: VecGetOwnershipRange(xin,&low,PETSC_NULL);
845: ISCreateStride(comm,ntmp,low,1,&ix);
846: } else{
847: PetscInt Ntmp;
848: VecGetSize(xin,&Ntmp);
849: ISCreateStride(comm,Ntmp,0,1,&ix);
850: }
851: tix = ix;
852: } else if (!ix) {
853: SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"ix not given, but not Seq or MPI vector");
854: }
856: if (!iy && yin_type == VEC_SEQ_ID) {
857: ISCreateStride(comm,ctx->to_n,0,1,&iy);
858: tiy = iy;
859: } else if (!iy && yin_type == VEC_MPI_ID) {
860: if (xin_type == VEC_MPI_ID) {
861: PetscInt ntmp, low;
862: VecGetLocalSize(yin,&ntmp);
863: VecGetOwnershipRange(yin,&low,PETSC_NULL);
864: ISCreateStride(comm,ntmp,low,1,&iy);
865: } else{
866: PetscInt Ntmp;
867: VecGetSize(yin,&Ntmp);
868: ISCreateStride(comm,Ntmp,0,1,&iy);
869: }
870: tiy = iy;
871: } else if (!iy) {
872: SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"iy not given, but not Seq or MPI vector");
873: }
875: /* ===========================================================================================================
876: Check for special cases
877: ==========================================================================================================*/
878: /* ---------------------------------------------------------------------------*/
879: if (xin_type == VEC_SEQ_ID && yin_type == VEC_SEQ_ID) {
880: if (ix->type == IS_GENERAL && iy->type == IS_GENERAL){
881: PetscInt nx,ny,*idx,*idy;
882: VecScatter_Seq_General *to,*from;
884: ISGetLocalSize(ix,&nx);
885: ISGetLocalSize(iy,&ny);
886: if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
887: ISGetIndices(ix,&idx);
888: ISGetIndices(iy,&idy);
889: PetscMalloc4(1,VecScatter_Seq_General,&to,nx,PetscInt,&to->slots,1,VecScatter_Seq_General,&from,nx,PetscInt,&from->slots);
890: to->n = nx;
891: #if defined(PETSC_USE_BOPT_g)
892: VecScatterCheckIndices_Private(ctx->to_n,ny,idy);
893: #endif
894: PetscMemcpy(to->slots,idy,nx*sizeof(PetscInt));
895: from->n = nx;
896: #if defined(PETSC_USE_BOPT_g)
897: VecScatterCheckIndices_Private(ctx->from_n,nx,idx);
898: #endif
899: PetscMemcpy(from->slots,idx,nx*sizeof(PetscInt));
900: to->type = VEC_SCATTER_SEQ_GENERAL;
901: from->type = VEC_SCATTER_SEQ_GENERAL;
902: ctx->todata = (void*)to;
903: ctx->fromdata = (void*)from;
904: ctx->postrecvs = 0;
905: ctx->begin = VecScatterBegin_SGtoSG;
906: ctx->end = 0;
907: ctx->destroy = VecScatterDestroy_SGtoSG;
908: ctx->copy = VecScatterCopy_SGToSG;
909: PetscLogInfo(xin,"VecScatterCreate:Special case: sequential vector general scatter\n");
910: goto functionend;
911: } else if (ix->type == IS_STRIDE && iy->type == IS_STRIDE){
912: PetscInt nx,ny,to_first,to_step,from_first,from_step;
913: VecScatter_Seq_Stride *from8,*to8;
915: ISGetLocalSize(ix,&nx);
916: ISGetLocalSize(iy,&ny);
917: if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
918: ISStrideGetInfo(iy,&to_first,&to_step);
919: ISStrideGetInfo(ix,&from_first,&from_step);
920: PetscMalloc2(1,VecScatter_Seq_Stride,&to8,1,VecScatter_Seq_Stride,&from8);
921: to8->n = nx;
922: to8->first = to_first;
923: to8->step = to_step;
924: from8->n = nx;
925: from8->first = from_first;
926: from8->step = from_step;
927: to8->type = VEC_SCATTER_SEQ_STRIDE;
928: from8->type = VEC_SCATTER_SEQ_STRIDE;
929: ctx->todata = (void*)to8;
930: ctx->fromdata = (void*)from8;
931: ctx->postrecvs = 0;
932: ctx->begin = VecScatterBegin_SStoSS;
933: ctx->end = 0;
934: ctx->destroy = VecScatterDestroy_SStoSS;
935: ctx->copy = 0;
936: PetscLogInfo(xin,"VecScatterCreate:Special case: sequential vector stride to stride\n");
937: goto functionend;
938: } else if (ix->type == IS_GENERAL && iy->type == IS_STRIDE){
939: PetscInt nx,ny,*idx,first,step;
940: VecScatter_Seq_General *from9;
941: VecScatter_Seq_Stride *to9;
943: ISGetLocalSize(ix,&nx);
944: ISGetIndices(ix,&idx);
945: ISGetLocalSize(iy,&ny);
946: ISStrideGetInfo(iy,&first,&step);
947: if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
948: PetscMalloc3(1,VecScatter_Seq_Stride,&to9,1,VecScatter_Seq_General,&from9,nx,PetscInt,&from9->slots);
949: to9->n = nx;
950: to9->first = first;
951: to9->step = step;
952: from9->n = nx;
953: #if defined(PETSC_USE_BOPT_g)
954: VecScatterCheckIndices_Private(ctx->from_n,nx,idx);
955: #endif
956: PetscMemcpy(from9->slots,idx,nx*sizeof(PetscInt));
957: ctx->todata = (void*)to9; ctx->fromdata = (void*)from9;
958: ctx->postrecvs = 0;
959: if (step == 1) ctx->begin = VecScatterBegin_SGtoSS_Stride1;
960: else ctx->begin = VecScatterBegin_SGtoSS;
961: ctx->destroy = VecScatterDestroy_SGtoSS;
962: ctx->end = 0;
963: ctx->copy = VecScatterCopy_SGToSS;
964: to9->type = VEC_SCATTER_SEQ_STRIDE;
965: from9->type = VEC_SCATTER_SEQ_GENERAL;
966: PetscLogInfo(xin,"VecScatterCreate:Special case: sequential vector general to stride\n");
967: goto functionend;
968: } else if (ix->type == IS_STRIDE && iy->type == IS_GENERAL){
969: PetscInt nx,ny,*idy,first,step;
970: VecScatter_Seq_General *to10;
971: VecScatter_Seq_Stride *from10;
973: ISGetLocalSize(ix,&nx);
974: ISGetIndices(iy,&idy);
975: ISGetLocalSize(iy,&ny);
976: ISStrideGetInfo(ix,&first,&step);
977: if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
978: PetscMalloc3(1,VecScatter_Seq_General,&to10,nx,PetscInt,&to10->slots,1,VecScatter_Seq_Stride,&from10);
979: from10->n = nx;
980: from10->first = first;
981: from10->step = step;
982: to10->n = nx;
983: #if defined(PETSC_USE_BOPT_g)
984: VecScatterCheckIndices_Private(ctx->to_n,ny,idy);
985: #endif
986: PetscMemcpy(to10->slots,idy,nx*sizeof(PetscInt));
987: ctx->todata = (void*)to10;
988: ctx->fromdata = (void*)from10;
989: ctx->postrecvs = 0;
990: if (step == 1) ctx->begin = VecScatterBegin_SStoSG_Stride1;
991: else ctx->begin = VecScatterBegin_SStoSG;
992: ctx->destroy = VecScatterDestroy_SStoSG;
993: ctx->end = 0;
994: ctx->copy = 0;
995: to10->type = VEC_SCATTER_SEQ_GENERAL;
996: from10->type = VEC_SCATTER_SEQ_STRIDE;
997: PetscLogInfo(xin,"VecScatterCreate:Special case: sequential vector stride to general\n");
998: goto functionend;
999: } else {
1000: PetscInt nx,ny,*idx,*idy;
1001: VecScatter_Seq_General *to11,*from11;
1002: PetscTruth idnx,idny;
1004: ISGetLocalSize(ix,&nx);
1005: ISGetLocalSize(iy,&ny);
1006: if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1008: ISIdentity(ix,&idnx);
1009: ISIdentity(iy,&idny);
1010: if (idnx && idny) {
1011: VecScatter_Seq_Stride *to13,*from13;
1012: PetscMalloc2(1,VecScatter_Seq_Stride,&to13,1,VecScatter_Seq_Stride,&from13);
1013: to13->n = nx;
1014: to13->first = 0;
1015: to13->step = 1;
1016: from13->n = nx;
1017: from13->first = 0;
1018: from13->step = 1;
1019: to13->type = VEC_SCATTER_SEQ_STRIDE;
1020: from13->type = VEC_SCATTER_SEQ_STRIDE;
1021: ctx->todata = (void*)to13;
1022: ctx->fromdata = (void*)from13;
1023: ctx->postrecvs = 0;
1024: ctx->begin = VecScatterBegin_SStoSS;
1025: ctx->end = 0;
1026: ctx->destroy = VecScatterDestroy_SStoSS;
1027: ctx->copy = 0;
1028: PetscLogInfo(xin,"VecScatterCreate:Special case: sequential copy\n");
1029: goto functionend;
1030: }
1032: ISGetIndices(iy,&idy);
1033: ISGetIndices(ix,&idx);
1034: PetscMalloc4(1,VecScatter_Seq_General,&to11,nx,PetscInt,&to11->slots,1,VecScatter_Seq_General,&from11,nx,PetscInt,&from11->slots);
1035: to11->n = nx;
1036: #if defined(PETSC_USE_BOPT_g)
1037: VecScatterCheckIndices_Private(ctx->to_n,ny,idy);
1038: #endif
1039: PetscMemcpy(to11->slots,idy,nx*sizeof(PetscInt));
1040: from11->n = nx;
1041: #if defined(PETSC_USE_BOPT_g)
1042: VecScatterCheckIndices_Private(ctx->from_n,nx,idx);
1043: #endif
1044: PetscMemcpy(from11->slots,idx,nx*sizeof(PetscInt));
1045: to11->type = VEC_SCATTER_SEQ_GENERAL;
1046: from11->type = VEC_SCATTER_SEQ_GENERAL;
1047: ctx->todata = (void*)to11;
1048: ctx->fromdata = (void*)from11;
1049: ctx->postrecvs = 0;
1050: ctx->begin = VecScatterBegin_SGtoSG;
1051: ctx->end = 0;
1052: ctx->destroy = VecScatterDestroy_SGtoSG;
1053: ctx->copy = VecScatterCopy_SGToSG;
1054: ISRestoreIndices(ix,&idx);
1055: ISRestoreIndices(iy,&idy);
1056: PetscLogInfo(xin,"VecScatterCreate:Sequential vector scatter with block indices\n");
1057: goto functionend;
1058: }
1059: }
1060: /* ---------------------------------------------------------------------------*/
1061: if (xin_type == VEC_MPI_ID && yin_type == VEC_SEQ_ID) {
1063: /* ===========================================================================================================
1064: Check for special cases
1065: ==========================================================================================================*/
1066: islocal = PETSC_FALSE;
1067: /* special case extracting (subset of) local portion */
1068: if (ix->type == IS_STRIDE && iy->type == IS_STRIDE){
1069: PetscInt nx,ny,to_first,to_step,from_first,from_step;
1070: PetscInt start,end;
1071: VecScatter_Seq_Stride *from12,*to12;
1073: VecGetOwnershipRange(xin,&start,&end);
1074: ISGetLocalSize(ix,&nx);
1075: ISStrideGetInfo(ix,&from_first,&from_step);
1076: ISGetLocalSize(iy,&ny);
1077: ISStrideGetInfo(iy,&to_first,&to_step);
1078: if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1079: if (ix->min >= start && ix->max < end) islocal = PETSC_TRUE; else islocal = PETSC_FALSE;
1080: MPI_Allreduce(&islocal,&cando,1,MPI_INT,MPI_LAND,xin->comm);
1081: if (cando) {
1082: PetscMalloc2(1,VecScatter_Seq_Stride,&to12,1,VecScatter_Seq_Stride,&from12);
1083: to12->n = nx;
1084: to12->first = to_first;
1085: to12->step = to_step;
1086: from12->n = nx;
1087: from12->first = from_first-start;
1088: from12->step = from_step;
1089: to12->type = VEC_SCATTER_SEQ_STRIDE;
1090: from12->type = VEC_SCATTER_SEQ_STRIDE;
1091: ctx->todata = (void*)to12;
1092: ctx->fromdata = (void*)from12;
1093: ctx->postrecvs = 0;
1094: ctx->begin = VecScatterBegin_SStoSS;
1095: ctx->end = 0;
1096: ctx->destroy = VecScatterDestroy_SStoSS;
1097: ctx->copy = VecScatterCopy_PStoSS;
1098: PetscLogInfo(xin,"VecScatterCreate:Special case: processors only getting local values\n");
1099: goto functionend;
1100: }
1101: } else {
1102: MPI_Allreduce(&islocal,&cando,1,MPI_INT,MPI_LAND,xin->comm);
1103: }
1105: /* test for special case of all processors getting entire vector */
1106: totalv = 0;
1107: if (ix->type == IS_STRIDE && iy->type == IS_STRIDE){
1108: PetscInt i,nx,ny,to_first,to_step,from_first,from_step,N;
1109: PetscMPIInt *count;
1110: VecScatter_MPI_ToAll *sto;
1112: ISGetLocalSize(ix,&nx);
1113: ISStrideGetInfo(ix,&from_first,&from_step);
1114: ISGetLocalSize(iy,&ny);
1115: ISStrideGetInfo(iy,&to_first,&to_step);
1116: if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1117: VecGetSize(xin,&N);
1118: if (nx != N) {
1119: totalv = 0;
1120: } else if (from_first == 0 && from_step == 1 &&
1121: from_first == to_first && from_step == to_step){
1122: totalv = 1;
1123: } else totalv = 0;
1124: MPI_Allreduce(&totalv,&cando,1,MPI_INT,MPI_LAND,xin->comm);
1126: if (cando) {
1127: PetscMap map;
1129: MPI_Comm_size(ctx->comm,&size);
1130: PetscMalloc2(1,VecScatter_MPI_ToAll,&sto,size,PetscMPIInt,&count);
1131: VecGetPetscMap(xin,&map);
1132: PetscMapGetGlobalRange(map,&range);
1133: for (i=0; i<size; i++) {
1134: count[i] = range[i+1] - range[i];
1135: }
1136: sto->count = count;
1137: sto->work1 = 0;
1138: sto->work2 = 0;
1139: sto->type = VEC_SCATTER_MPI_TOALL;
1140: ctx->todata = (void*)sto;
1141: ctx->fromdata = 0;
1142: ctx->postrecvs = 0;
1143: ctx->begin = VecScatterBegin_MPI_ToAll;
1144: ctx->end = 0;
1145: ctx->destroy = VecScatterDestroy_MPI_ToAll;
1146: ctx->copy = VecScatterCopy_MPI_ToAll;
1147: PetscLogInfo(xin,"VecScatterCreate:Special case: all processors get entire parallel vector\n");
1148: goto functionend;
1149: }
1150: } else {
1151: MPI_Allreduce(&totalv,&cando,1,MPI_INT,MPI_LAND,xin->comm);
1152: }
1154: /* test for special case of processor 0 getting entire vector */
1155: totalv = 0;
1156: if (ix->type == IS_STRIDE && iy->type == IS_STRIDE){
1157: PetscInt i,nx,ny,to_first,to_step,from_first,from_step,N;
1158: PetscMPIInt rank,*count;
1159: VecScatter_MPI_ToAll *sto;
1161: PetscObjectGetComm((PetscObject)xin,&comm);
1162: MPI_Comm_rank(comm,&rank);
1163: ISGetLocalSize(ix,&nx);
1164: ISStrideGetInfo(ix,&from_first,&from_step);
1165: ISGetLocalSize(iy,&ny);
1166: ISStrideGetInfo(iy,&to_first,&to_step);
1167: if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1168: if (!rank) {
1169: VecGetSize(xin,&N);
1170: if (nx != N) {
1171: totalv = 0;
1172: } else if (from_first == 0 && from_step == 1 &&
1173: from_first == to_first && from_step == to_step){
1174: totalv = 1;
1175: } else totalv = 0;
1176: } else {
1177: if (!nx) totalv = 1;
1178: else totalv = 0;
1179: }
1180: MPI_Allreduce(&totalv,&cando,1,MPI_INT,MPI_LAND,xin->comm);
1182: if (cando) {
1183: PetscMap map;
1185: MPI_Comm_size(ctx->comm,&size);
1186: PetscMalloc2(1,VecScatter_MPI_ToAll,&sto,size,PetscMPIInt,&count);
1187: VecGetPetscMap(xin,&map);
1188: PetscMapGetGlobalRange(map,&range);
1189: for (i=0; i<size; i++) {
1190: count[i] = range[i+1] - range[i];
1191: }
1192: sto->count = count;
1193: sto->work1 = 0;
1194: sto->work2 = 0;
1195: sto->type = VEC_SCATTER_MPI_TOONE;
1196: ctx->todata = (void*)sto;
1197: ctx->fromdata = 0;
1198: ctx->postrecvs = 0;
1199: ctx->begin = VecScatterBegin_MPI_ToOne;
1200: ctx->end = 0;
1201: ctx->destroy = VecScatterDestroy_MPI_ToAll;
1202: ctx->copy = VecScatterCopy_MPI_ToAll;
1203: PetscLogInfo(xin,"VecScatterCreate:Special case: processor zero gets entire parallel vector, rest get none\n");
1204: goto functionend;
1205: }
1206: } else {
1207: MPI_Allreduce(&totalv,&cando,1,MPI_INT,MPI_LAND,xin->comm);
1208: }
1210: ISBlock(ix,&ixblock);
1211: ISBlock(iy,&iyblock);
1212: ISStride(iy,&iystride);
1213: if (ixblock) {
1214: /* special case block to block */
1215: if (iyblock) {
1216: PetscInt nx,ny,*idx,*idy,bsx,bsy;
1217: ISBlockGetBlockSize(iy,&bsy);
1218: ISBlockGetBlockSize(ix,&bsx);
1219: if (bsx == bsy && (bsx == 12 || bsx == 5 || bsx == 4 || bsx == 3 || bsx == 2)) {
1220: ISBlockGetSize(ix,&nx);
1221: ISBlockGetIndices(ix,&idx);
1222: ISBlockGetSize(iy,&ny);
1223: ISBlockGetIndices(iy,&idy);
1224: if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1225: VecScatterCreate_PtoS(nx,idx,ny,idy,xin,yin,bsx,ctx);
1226: ISBlockRestoreIndices(ix,&idx);
1227: ISBlockRestoreIndices(iy,&idy);
1228: PetscLogInfo(xin,"VecScatterCreate:Special case: blocked indices\n");
1229: goto functionend;
1230: }
1231: /* special case block to stride */
1232: } else if (iystride) {
1233: PetscInt ystart,ystride,ysize,bsx;
1234: ISStrideGetInfo(iy,&ystart,&ystride);
1235: ISGetLocalSize(iy,&ysize);
1236: ISBlockGetBlockSize(ix,&bsx);
1237: /* see if stride index set is equivalent to block index set */
1238: if (((bsx == 2) || (bsx == 3) || (bsx == 4) || (bsx == 5) || (bsx == 12)) &&
1239: ((ystart % bsx) == 0) && (ystride == 1) && ((ysize % bsx) == 0)) {
1240: PetscInt nx,*idx,*idy,il;
1241: ISBlockGetSize(ix,&nx); ISBlockGetIndices(ix,&idx);
1242: if (ysize != bsx*nx) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1243: PetscMalloc(nx*sizeof(PetscInt),&idy);
1244: if (nx) {
1245: idy[0] = ystart;
1246: for (il=1; il<nx; il++) idy[il] = idy[il-1] + bsx;
1247: }
1248: VecScatterCreate_PtoS(nx,idx,nx,idy,xin,yin,bsx,ctx);
1249: PetscFree(idy);
1250: ISBlockRestoreIndices(ix,&idx);
1251: PetscLogInfo(xin,"VecScatterCreate:Special case: blocked indices to stride\n");
1252: goto functionend;
1253: }
1254: }
1255: }
1256: /* left over general case */
1257: {
1258: PetscInt nx,ny,*idx,*idy;
1259: ISGetLocalSize(ix,&nx);
1260: ISGetIndices(ix,&idx);
1261: ISGetLocalSize(iy,&ny);
1262: ISGetIndices(iy,&idy);
1263: if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1264: VecScatterCreate_PtoS(nx,idx,ny,idy,xin,yin,1,ctx);
1265: ISRestoreIndices(ix,&idx);
1266: ISRestoreIndices(iy,&idy);
1267: goto functionend;
1268: }
1269: }
1270: /* ---------------------------------------------------------------------------*/
1271: if (xin_type == VEC_SEQ_ID && yin_type == VEC_MPI_ID) {
1272: /* ===========================================================================================================
1273: Check for special cases
1274: ==========================================================================================================*/
1275: /* special case local copy portion */
1276: islocal = PETSC_FALSE;
1277: if (ix->type == IS_STRIDE && iy->type == IS_STRIDE){
1278: PetscInt nx,ny,to_first,to_step,from_step,start,end,from_first;
1279: VecScatter_Seq_Stride *from,*to;
1281: VecGetOwnershipRange(yin,&start,&end);
1282: ISGetLocalSize(ix,&nx);
1283: ISStrideGetInfo(ix,&from_first,&from_step);
1284: ISGetLocalSize(iy,&ny);
1285: ISStrideGetInfo(iy,&to_first,&to_step);
1286: if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1287: if (iy->min >= start && iy->max < end) islocal = PETSC_TRUE; else islocal = PETSC_FALSE;
1288: MPI_Allreduce(&islocal,&cando,1,MPI_INT,MPI_LAND,yin->comm);
1289: if (cando) {
1290: PetscMalloc2(1,VecScatter_Seq_Stride,&to,1,VecScatter_Seq_Stride,&from);
1291: to->n = nx;
1292: to->first = to_first-start;
1293: to->step = to_step;
1294: from->n = nx;
1295: from->first = from_first;
1296: from->step = from_step;
1297: to->type = VEC_SCATTER_SEQ_STRIDE;
1298: from->type = VEC_SCATTER_SEQ_STRIDE;
1299: ctx->todata = (void*)to;
1300: ctx->fromdata = (void*)from;
1301: ctx->postrecvs = 0;
1302: ctx->begin = VecScatterBegin_SStoSS;
1303: ctx->end = 0;
1304: ctx->destroy = VecScatterDestroy_SStoSS;
1305: ctx->copy = VecScatterCopy_PStoSS;
1306: PetscLogInfo(xin,"VecScatterCreate:Special case: sequential stride to stride\n");
1307: goto functionend;
1308: }
1309: } else {
1310: MPI_Allreduce(&islocal,&cando,1,MPI_INT,MPI_LAND,yin->comm);
1311: }
1312: /* general case */
1313: {
1314: PetscInt nx,ny,*idx,*idy;
1315: ISGetLocalSize(ix,&nx);
1316: ISGetIndices(ix,&idx);
1317: ISGetLocalSize(iy,&ny);
1318: ISGetIndices(iy,&idy);
1319: if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1320: VecScatterCreate_StoP(nx,idx,ny,idy,yin,ctx);
1321: ISRestoreIndices(ix,&idx);
1322: ISRestoreIndices(iy,&idy);
1323: goto functionend;
1324: }
1325: }
1326: /* ---------------------------------------------------------------------------*/
1327: if (xin_type == VEC_MPI_ID && yin_type == VEC_MPI_ID) {
1328: /* no special cases for now */
1329: PetscInt nx,ny,*idx,*idy;
1330: ISGetLocalSize(ix,&nx);
1331: ISGetIndices(ix,&idx);
1332: ISGetLocalSize(iy,&ny);
1333: ISGetIndices(iy,&idy);
1334: if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1335: VecScatterCreate_PtoP(nx,idx,ny,idy,xin,yin,ctx);
1336: ISRestoreIndices(ix,&idx);
1337: ISRestoreIndices(iy,&idy);
1338: goto functionend;
1339: }
1341: functionend:
1342: *newctx = ctx;
1343: if (tix) {ISDestroy(tix);}
1344: if (tiy) {ISDestroy(tiy);}
1345: PetscOptionsHasName(PETSC_NULL,"-vecscatter_view_info",&flag);
1346: if (flag) {
1347: PetscViewerPushFormat(PETSC_VIEWER_STDOUT_(comm),PETSC_VIEWER_ASCII_INFO);
1348: VecScatterView(ctx,PETSC_VIEWER_STDOUT_(comm));
1349: PetscViewerPopFormat(PETSC_VIEWER_STDOUT_(comm));
1350: }
1351: PetscOptionsHasName(PETSC_NULL,"-vecscatter_view",&flag);
1352: if (flag) {
1353: VecScatterView(ctx,PETSC_VIEWER_STDOUT_(comm));
1354: }
1355: return(0);
1356: }
1358: /* ------------------------------------------------------------------*/
1361: /*@
1362: VecScatterPostRecvs - Posts the receives required for the ready-receiver
1363: version of the VecScatter routines.
1365: Collective on VecScatter
1367: Input Parameters:
1368: + x - the vector from which we scatter (not needed, can be PETSC_NULL)
1369: . y - the vector to which we scatter
1370: . addv - either ADD_VALUES or INSERT_VALUES
1371: . mode - the scattering mode, usually SCATTER_FORWARD. The available modes are:
1372: SCATTER_FORWARD, SCATTER_REVERSE
1373: - inctx - scatter context generated by VecScatterCreate()
1375: Output Parameter:
1376: . y - the vector to which we scatter
1378: Level: advanced
1380: Notes:
1381: If you use SCATTER_REVERSE the first two arguments should be reversed from
1382: those used for the SCATTER_FORWARD scatter.
1383: The vectors x and y cannot be the same. y[iy[i]] = x[ix[i]], for i=0,...,ni-1
1385: This scatter is far more general than the conventional
1386: scatter, since it can be a gather or a scatter or a combination,
1387: depending on the indices ix and iy. If x is a parallel vector and y
1388: is sequential, VecScatterBegin() can serve to gather values to a
1389: single processor. Similarly, if y is parallel and x sequential, the
1390: routine can scatter from one processor to many processors.
1392: .seealso: VecScatterCreate(), VecScatterEnd(), VecScatterBegin()
1393: @*/
1394: PetscErrorCode VecScatterPostRecvs(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter inctx)
1395: {
1402: if (inctx->postrecvs) {
1403: (*inctx->postrecvs)(x,y,addv,mode,inctx);
1404: }
1405: return(0);
1406: }
1408: /* ------------------------------------------------------------------*/
1411: /*@
1412: VecScatterBegin - Begins a generalized scatter from one vector to
1413: another. Complete the scattering phase with VecScatterEnd().
1415: Collective on VecScatter and Vec
1417: Input Parameters:
1418: + x - the vector from which we scatter
1419: . y - the vector to which we scatter
1420: . addv - either ADD_VALUES or INSERT_VALUES
1421: . mode - the scattering mode, usually SCATTER_FORWARD. The available modes are:
1422: SCATTER_FORWARD or SCATTER_REVERSE
1423: - inctx - scatter context generated by VecScatterCreate()
1425: Level: intermediate
1427: Notes:
1428: The vectors x and y need not be the same vectors used in the call
1429: to VecScatterCreate(), but x must have the same parallel data layout
1430: as that passed in as the x to VecScatterCreate(), similarly for the y.
1431: Most likely they have been obtained from VecDuplicate().
1433: You cannot change the values in the input vector between the calls to VecScatterBegin()
1434: and VecScatterEnd().
1436: If you use SCATTER_REVERSE the first two arguments should be reversed from
1437: those used for the SCATTER_FORWARD scatter.
1438:
1439: y[iy[i]] = x[ix[i]], for i=0,...,ni-1
1441: This scatter is far more general than the conventional
1442: scatter, since it can be a gather or a scatter or a combination,
1443: depending on the indices ix and iy. If x is a parallel vector and y
1444: is sequential, VecScatterBegin() can serve to gather values to a
1445: single processor. Similarly, if y is parallel and x sequential, the
1446: routine can scatter from one processor to many processors.
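   For example (an illustrative sketch, not from the original manual page), with
   a parallel x and sequential y as described above, a forward gather is
.vb
      VecScatterBegin(x,y,INSERT_VALUES,SCATTER_FORWARD,inctx);
      VecScatterEnd(x,y,INSERT_VALUES,SCATTER_FORWARD,inctx);
.ve
   and the corresponding reverse scatter swaps the first two arguments:
.vb
      VecScatterBegin(y,x,INSERT_VALUES,SCATTER_REVERSE,inctx);
      VecScatterEnd(y,x,INSERT_VALUES,SCATTER_REVERSE,inctx);
.ve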
1448: Concepts: scatter^between vectors
1449: Concepts: gather^between vectors
1451: .seealso: VecScatterCreate(), VecScatterEnd()
1452: @*/
1453: PetscErrorCode VecScatterBegin(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter inctx)
1454: {
1456: #if defined(PETSC_USE_BOPT_g)
1457: PetscInt to_n,from_n;
1458: #endif
1464: if (inctx->inuse) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Scatter ctx already in use");
1465: #if defined(PETSC_USE_BOPT_g)
1466: /*
1467: Error checking to make sure these vectors match the vectors used
1468: to create the vector scatter context. A value of -1 in from_n and to_n indicates that the
1469: vector lengths are unknown (for example with mapped scatters) and thus
1470: no error checking is performed.
1471: */
1472: if (inctx->from_n >= 0 && inctx->to_n >= 0) {
1473: VecGetLocalSize(x,&from_n);
1474: VecGetLocalSize(y,&to_n);
1475: if (mode & SCATTER_REVERSE) {
1476: if (to_n != inctx->from_n) SETERRQ(PETSC_ERR_ARG_SIZ,"Vector wrong size for scatter (scatter reverse and vector to != ctx from size)");
1477: if (from_n != inctx->to_n) SETERRQ(PETSC_ERR_ARG_SIZ,"Vector wrong size for scatter (scatter reverse and vector from != ctx to size)");
1478: } else {
1479: if (to_n != inctx->to_n) SETERRQ(PETSC_ERR_ARG_SIZ,"Vector wrong size for scatter (scatter forward and vector to != ctx to size)");
1480: if (from_n != inctx->from_n) SETERRQ(PETSC_ERR_ARG_SIZ,"Vector wrong size for scatter (scatter forward and vector from != ctx from size)");
1481: }
1482: }
1483: #endif
1485: inctx->inuse = PETSC_TRUE;
1486: PetscLogEventBarrierBegin(VEC_ScatterBarrier,0,0,0,0,inctx->comm);
1487: (*inctx->begin)(x,y,addv,mode,inctx);
1488: if (inctx->beginandendtogether && inctx->end) {
1489: inctx->inuse = PETSC_FALSE;
1490: (*inctx->end)(x,y,addv,mode,inctx);
1491: }
1492: PetscLogEventBarrierEnd(VEC_ScatterBarrier,0,0,0,0,inctx->comm);
1493: return(0);
1494: }
1496: /* --------------------------------------------------------------------*/
1499: /*@
1500: VecScatterEnd - Ends a generalized scatter from one vector to another. Call
1501: after first calling VecScatterBegin().
1503: Collective on VecScatter and Vec
1505: Input Parameters:
1506: + x - the vector from which we scatter
1507: . y - the vector to which we scatter
1508: . addv - either ADD_VALUES or INSERT_VALUES.
1509: . mode - the scattering mode, usually SCATTER_FORWARD. The available modes are:
1510: SCATTER_FORWARD, SCATTER_REVERSE
1511: - ctx - scatter context generated by VecScatterCreate()
1513: Level: intermediate
1515: Notes:
1516: If you use SCATTER_REVERSE the first two arguments should be reversed from
1517: those used for the SCATTER_FORWARD scatter.
1518: y[iy[i]] = x[ix[i]], for i=0,...,ni-1
1520: .seealso: VecScatterBegin(), VecScatterCreate()
1521: @*/
1522: PetscErrorCode VecScatterEnd(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
1523: {
1530: ctx->inuse = PETSC_FALSE;
1531: if (!ctx->end) return(0);
1532: if (!ctx->beginandendtogether) {
1533: PetscLogEventBegin(VEC_ScatterEnd,ctx,x,y,0);
1534: (*(ctx)->end)(x,y,addv,mode,ctx);
1535: PetscLogEventEnd(VEC_ScatterEnd,ctx,x,y,0);
1536: }
1537: return(0);
1538: }
1542: /*@C
1543: VecScatterDestroy - Destroys a scatter context created by
1544: VecScatterCreate().
1546: Collective on VecScatter
1548: Input Parameter:
1549: . ctx - the scatter context
1551: Level: intermediate
1553: .seealso: VecScatterCreate(), VecScatterCopy()
1554: @*/
1555: PetscErrorCode VecScatterDestroy(VecScatter ctx)
1556: {
1561: if (--ctx->refct > 0) return(0);
1563: /* if memory was published with AMS then destroy it */
1564: PetscObjectDepublish(ctx);
1566: (*ctx->destroy)(ctx);
1567: return(0);
1568: }
1572: /*@C
1573: VecScatterCopy - Makes a copy of a scatter context.
1575: Collective on VecScatter
1577: Input Parameter:
1578: . sctx - the scatter context
1580: Output Parameter:
1581: . ctx - the context copy
1583: Level: advanced
1585: .seealso: VecScatterCreate(), VecScatterDestroy()
1586: @*/
1587: PetscErrorCode VecScatterCopy(VecScatter sctx,VecScatter *ctx)
1588: {
1594: if (!sctx->copy) SETERRQ(PETSC_ERR_SUP,"Cannot copy this type");
1595: PetscHeaderCreate(*ctx,_p_VecScatter,int,VEC_SCATTER_COOKIE,0,"VecScatter",sctx->comm,VecScatterDestroy,VecScatterView);
1596: (*ctx)->to_n = sctx->to_n;
1597: (*ctx)->from_n = sctx->from_n;
1598: (*sctx->copy)(sctx,*ctx);
1599: return(0);
1600: }
1603: /* ------------------------------------------------------------------*/
1606: /*@
1607: VecScatterView - Views a vector scatter context.
1609: Collective on VecScatter
1611: Input Parameters:
1612: + ctx - the scatter context
1613: - viewer - the viewer for displaying the context
1615: Level: intermediate
1617: @*/
1618: PetscErrorCode VecScatterView(VecScatter ctx,PetscViewer viewer)
1619: {
1624: if (!viewer) viewer = PETSC_VIEWER_STDOUT_(ctx->comm);
1626: if (!ctx->view) SETERRQ(PETSC_ERR_SUP,"Cannot view this type of scatter context yet");
1628: (*ctx->view)(ctx,viewer);
1629: return(0);
1630: }
1634: /*@
1635: VecScatterRemap - Remaps the "from" and "to" indices in a
1636: vector scatter context. FOR EXPERTS ONLY!
1638: Collective on VecScatter
1640: Input Parameters:
1641: + scat - vector scatter context
1642: . rto - remapping for "to" indices (may be PETSC_NULL)
1643: - rfrom - remapping for "from" indices (may be PETSC_NULL)
1645: Level: developer
1647: Notes: In the parallel case the todata is actually the indices
1648: from which the data is TAKEN! The from stuff is where the
1649: data is finally put. This is VERY VERY confusing!
1651: In the sequential case the todata is the indices where the
1652: data is put and the fromdata is where it is taken from.
1653: This is backwards from the parallel case! CRY! CRY! CRY!
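   For instance (an illustrative sketch, not from the original manual page; perm
   is a hypothetical permapping array with one entry per scatter index), the
   "to" indices can be remapped with
.vb
      PetscInt perm[3] = {2,0,1};
      VecScatterRemap(scat,perm,PETSC_NULL);
.ve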
1655: @*/
1656: PetscErrorCode VecScatterRemap(VecScatter scat,PetscInt *rto,PetscInt *rfrom)
1657: {
1658: VecScatter_Seq_General *to,*from;
1659: VecScatter_MPI_General *mto;
1660: PetscInt i;
1667: from = (VecScatter_Seq_General *)scat->fromdata;
1668: mto = (VecScatter_MPI_General *)scat->todata;
1670: if (mto->type == VEC_SCATTER_MPI_TOALL) SETERRQ(PETSC_ERR_ARG_SIZ,"Not for to all scatter");
1672: if (rto) {
1673: if (mto->type == VEC_SCATTER_MPI_GENERAL) {
1674: /* handle off processor parts */
1675: for (i=0; i<mto->starts[mto->n]; i++) {
1676: mto->indices[i] = rto[mto->indices[i]];
1677: }
1678: /* handle local part */
1679: to = &mto->local;
1680: for (i=0; i<to->n; i++) {
1681: to->slots[i] = rto[to->slots[i]];
1682: }
1683: } else if (from->type == VEC_SCATTER_SEQ_GENERAL) {
1684: for (i=0; i<from->n; i++) {
1685: from->slots[i] = rto[from->slots[i]];
1686: }
1687: } else if (from->type == VEC_SCATTER_SEQ_STRIDE) {
1688: VecScatter_Seq_Stride *sto = (VecScatter_Seq_Stride*)from;
1689:
1690: /* if the remapping is the identity and stride is identity then skip remap */
1691: if (sto->step == 1 && sto->first == 0) {
1692: for (i=0; i<sto->n; i++) {
1693: if (rto[i] != i) {
1694: SETERRQ(PETSC_ERR_ARG_SIZ,"Unable to remap such scatters");
1695: }
1696: }
1697: } else SETERRQ(PETSC_ERR_ARG_SIZ,"Unable to remap such scatters");
1698: } else SETERRQ(PETSC_ERR_ARG_SIZ,"Unable to remap such scatters");
1699: }
1701: if (rfrom) {
1702: SETERRQ(PETSC_ERR_SUP,"Unable to remap the FROM in scatters yet");
1703: }
1705: /*
1706: Mark the vector lengths as unknown because we do not know the
1707: lengths of the remapped vectors
1708: */
1709: scat->from_n = -1;
1710: scat->to_n = -1;
1712: return(0);
1713: }