Actual source code: vscat.c

  1: /*$Id: vscat.c,v 1.169 2001/03/23 23:21:18 balay Exp $*/

  3: /*
  4:      Code for creating scatters between vectors. This file 
  5:   includes the code for scattering between sequential vectors and
  6:   some special cases for parallel scatters.
  7: */

 9:  #include "src/vec/is/isimpl.h"
 10:  #include "src/vec/vecimpl.h"

 12: /*
 13:      Checks if any indices are less than zero and generates an error
 14: */
 15: static int VecScatterCheckIndices_Private(int nmax,int n,int *idx)
 16: {
 17:   int i;

 20:   for (i=0; i<n; i++) {
 21:     if (idx[i] < 0)     SETERRQ2(1,"Negative index %d at %d location",idx[i],i);
 22:     if (idx[i] >= nmax) SETERRQ3(1,"Index %d at %d location greater than max %d",idx[i],i,nmax);
 23:   }
 24:   return(0);
 25: }

 27: /*
 28:       This is special scatter code for when the entire parallel vector is 
 29:    copied to each processor.

 31:    This code was written by Cameron Cooper, Occidental College, Fall 1995,
 32:    while working at ANL as a SERS student.
 33: */
 34: int VecScatterBegin_MPI_ToAll(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
 35: {
 36:   int    ierr,yy_n,xx_n,*range;
 37:   Scalar *xv,*yv;
 38:   Map    map;

 41:   VecGetLocalSize(y,&yy_n);
 42:   VecGetLocalSize(x,&xx_n);
 43:   VecGetArray(y,&yv);
 44:   if (x != y) {VecGetArray(x,&xv);}

 46:   if (mode & SCATTER_REVERSE) {
 47:     Scalar               *xvt,*xvt2;
 48:     VecScatter_MPI_ToAll *scat = (VecScatter_MPI_ToAll*)ctx->todata;
 49:     int                  i;

 51:     if (addv == INSERT_VALUES) {
 52:       int rstart,rend;
 53:       /* 
 54:          copy the correct part of the local vector into the local storage of 
 55:          the MPI one.  Note: this operation only makes sense if all the local 
 56:          vectors have the same values
 57:       */
 58:       VecGetOwnershipRange(y,&rstart,&rend);
 59:       PetscMemcpy(yv,xv+rstart,yy_n*sizeof(Scalar));
 60:     } else {
 61:       MPI_Comm comm;
 62:       int      rank;
 63:       PetscObjectGetComm((PetscObject)y,&comm);
 64:       MPI_Comm_rank(comm,&rank);
 65:       if (scat->work1) xvt = scat->work1;
 66:       else {
 67:         ierr        = PetscMalloc((xx_n+1)*sizeof(Scalar),&xvt);
 68:         scat->work1 = xvt;
 69:         PetscLogObjectMemory(ctx,xx_n*sizeof(Scalar));
 70:       }
 71:       if (!rank) { /* I am the zeroth processor, values are accumulated here */
 72:         if   (scat->work2) xvt2 = scat->work2;
 73:         else {
 74:           ierr        = PetscMalloc((xx_n+1)*sizeof(Scalar),& xvt2);
 75:           scat->work2 = xvt2;
 76:           PetscLogObjectMemory(ctx,xx_n*sizeof(Scalar));
 77:         }
 78:         VecGetMap(y,&map);
 79:         MapGetGlobalRange(map,&range);
 80:         MPI_Gatherv(yv,yy_n,MPIU_SCALAR,xvt2,scat->count,range,MPIU_SCALAR,0,ctx->comm);
 81: #if defined(PETSC_USE_COMPLEX)
 82:         MPI_Reduce(xv,xvt,2*xx_n,MPI_DOUBLE,MPI_SUM,0,ctx->comm);
 83: #else
 84:         MPI_Reduce(xv,xvt,xx_n,MPIU_SCALAR,MPI_SUM,0,ctx->comm);
 85: #endif
 86:         if (addv == ADD_VALUES) {
 87:           for (i=0; i<xx_n; i++) {
 88:             xvt[i] += xvt2[i];
 89:           }
 90: #if !defined(PETSC_USE_COMPLEX)
 91:         } else if (addv == MAX_VALUES) {
 92:           for (i=0; i<xx_n; i++) {
 93:             xvt[i] = PetscMax(xvt[i],xvt2[i]);
 94:           }
 95: #endif
 96:         } else {SETERRQ(1,"Wrong insert option");}
 97:         MPI_Scatterv(xvt,scat->count,map->range,MPIU_SCALAR,yv,yy_n,MPIU_SCALAR,0,ctx->comm);
 98:       } else {
 99:         VecGetMap(y,&map);
100:         MapGetGlobalRange(map,&range);
101:         MPI_Gatherv(yv,yy_n,MPIU_SCALAR,0, 0,0,MPIU_SCALAR,0,ctx->comm);
102: #if defined(PETSC_USE_COMPLEX)
103:         MPI_Reduce(xv,xvt,2*xx_n,MPI_DOUBLE,MPI_SUM,0,ctx->comm);
104: #else
105:         MPI_Reduce(xv,xvt,xx_n,MPIU_SCALAR,MPI_SUM,0,ctx->comm);
106: #endif
107:         MPI_Scatterv(0,scat->count,range,MPIU_SCALAR,yv,yy_n,MPIU_SCALAR,0,ctx->comm);
108:       }
109:     }
110:   } else {
111:     Scalar               *yvt;
112:     VecScatter_MPI_ToAll *scat = (VecScatter_MPI_ToAll*)ctx->todata;
113:     int                  i;

115:     VecGetMap(x,&map);
116:     MapGetGlobalRange(map,&range);
117:     if (addv == INSERT_VALUES) {
118:       MPI_Allgatherv(xv,xx_n,MPIU_SCALAR,yv,scat->count,range,MPIU_SCALAR,ctx->comm);
119:     } else {
120:       if (scat->work1) yvt = scat->work1;
121:       else {
122:         ierr        = PetscMalloc((yy_n+1)*sizeof(Scalar),&yvt);
123:         scat->work1 = yvt;
124:         PetscLogObjectMemory(ctx,yy_n*sizeof(Scalar));
125:       }
126:       MPI_Allgatherv(xv,xx_n,MPIU_SCALAR,yvt,scat->count,map->range,MPIU_SCALAR,ctx->comm);
127:       if (addv == ADD_VALUES){
128:         for (i=0; i<yy_n; i++) {
129:           yv[i] += yvt[i];
130:         }
131: #if !defined(PETSC_USE_COMPLEX)
132:       } else if (addv == MAX_VALUES) {
133:         for (i=0; i<yy_n; i++) {
134:           yv[i] = PetscMax(yv[i],yvt[i]);
135:         }
136: #endif
137:       } else {SETERRQ(1,"Wrong insert option");}
138:     }
139:   }
140:   VecRestoreArray(y,&yv);
141:   if (x != y) {VecRestoreArray(x,&xv);}
142:   return(0);
143: }
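/*
   Editor's note (illustrative sketch, not part of the original source): the ToAll
   path above is what VecScatterCreate() selects when every process requests the
   entire parallel vector, for example by passing PETSC_NULL index sets together
   with a sequential destination vector whose length equals the global size.
   Assuming the PETSc 2.x calling sequences used elsewhere in this file, a usage
   sketch might look like:

      Vec        x,y;                         x is MPI, y is sequential of length N
      VecScatter scat;
      int        N,ierr;

      ierr = VecGetSize(x,&N);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,N,&y);CHKERRQ(ierr);
      ierr = VecScatterCreate(x,PETSC_NULL,y,PETSC_NULL,&scat);CHKERRQ(ierr);
      ierr = VecScatterBegin(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
      ierr = VecScatterEnd(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
      ierr = VecScatterDestroy(scat);CHKERRQ(ierr);
*/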

145: /*
146:       This is special scatter code for when the entire parallel vector is 
147:    copied to processor 0.

149: */
150: int VecScatterBegin_MPI_ToOne(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
151: {
152:   int      rank,ierr,yy_n,xx_n,*range;
153:   Scalar   *xv,*yv;
154:   MPI_Comm comm;
155:   Map      map;

158:   VecGetLocalSize(y,&yy_n);
159:   VecGetLocalSize(x,&xx_n);
160:   VecGetArray(x,&xv);
161:   VecGetArray(y,&yv);

163:   PetscObjectGetComm((PetscObject)x,&comm);
164:   MPI_Comm_rank(comm,&rank);

166:   /* --------  Reverse scatter; spread from processor 0 to other processors */
167:   if (mode & SCATTER_REVERSE) {
168:     Scalar               *yvt;
169:     VecScatter_MPI_ToAll *scat = (VecScatter_MPI_ToAll*)ctx->todata;
170:     int                  i;

172:     VecGetMap(y,&map);
173:     MapGetGlobalRange(map,&range);
174:     if (addv == INSERT_VALUES) {
175:       MPI_Scatterv(xv,scat->count,range,MPIU_SCALAR,yv,yy_n,MPIU_SCALAR,0,ctx->comm);
176:     } else {
177:       if (scat->work2) yvt = scat->work2;
178:       else {
179:         ierr        = PetscMalloc((xx_n+1)*sizeof(Scalar),&yvt);
180:         scat->work2 = yvt;
181:         PetscLogObjectMemory(ctx,xx_n*sizeof(Scalar));
182:       }
183:       MPI_Scatterv(xv,scat->count,range,MPIU_SCALAR,yvt,yy_n,MPIU_SCALAR,0,ctx->comm);
184:       if (addv == ADD_VALUES) {
185:         for (i=0; i<yy_n; i++) {
186:           yv[i] += yvt[i];
187:         }
188: #if !defined(PETSC_USE_COMPLEX)
189:       } else if (addv == MAX_VALUES) {
190:         for (i=0; i<yy_n; i++) {
191:           yv[i] = PetscMax(yv[i],yvt[i]);
192:         }
193: #endif
194:       } else {SETERRQ(1,"Wrong insert option");}
195:     }
196:   /* ---------  Forward scatter; gather all values onto processor 0 */
197:   } else {
198:     Scalar               *yvt = 0;
199:     VecScatter_MPI_ToAll *scat = (VecScatter_MPI_ToAll*)ctx->todata;
200:     int                  i;

202:     VecGetMap(x,&map);
203:     MapGetGlobalRange(map,&range);
204:     if (addv == INSERT_VALUES) {
205:       MPI_Gatherv(xv,xx_n,MPIU_SCALAR,yv,scat->count,range,MPIU_SCALAR,0,ctx->comm);
206:     } else {
207:       if (!rank) {
208:         if (scat->work1) yvt = scat->work1;
209:         else {
210:           ierr        = PetscMalloc((yy_n+1)*sizeof(Scalar),&yvt);
211:           scat->work1 = yvt;
212:           PetscLogObjectMemory(ctx,yy_n*sizeof(Scalar));
213:         }
214:       }
215:       MPI_Gatherv(xv,xx_n,MPIU_SCALAR,yvt,scat->count,range,MPIU_SCALAR,0,ctx->comm);
216:       if (!rank) {
217:         if (addv == ADD_VALUES) {
218:           for (i=0; i<yy_n; i++) {
219:             yv[i] += yvt[i];
220:           }
221: #if !defined(PETSC_USE_COMPLEX)
222:         } else if (addv == MAX_VALUES) {
223:           for (i=0; i<yy_n; i++) {
224:             yv[i] = PetscMax(yv[i],yvt[i]);
225:           }
226: #endif
227:         }  else {SETERRQ(1,"Wrong insert option");}
228:       }
229:     }
230:   }
231:   VecRestoreArray(x,&xv);
232:   VecRestoreArray(y,&yv);
233:   return(0);
234: }
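/*
   Editor's note (illustrative sketch, not part of the original source): the ToOne
   path above is selected when process 0 requests the entire parallel vector and
   every other process requests nothing, i.e. the index sets are full strides of
   length N on rank 0 and empty elsewhere. Assuming the calling sequences used in
   this file (comm below is the communicator of x), that might be set up as:

      int        rank,N,ierr;
      Vec        x,y;                     x is MPI; y is sequential, length N on
      IS         ix,iy;                   rank 0 and length 0 on all other ranks
      VecScatter scat;

      ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
      ierr = VecGetSize(x,&N);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,rank ? 0 : N,&y);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,rank ? 0 : N,0,1,&ix);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,rank ? 0 : N,0,1,&iy);CHKERRQ(ierr);
      ierr = VecScatterCreate(x,ix,y,iy,&scat);CHKERRQ(ierr);
      ierr = VecScatterBegin(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
      ierr = VecScatterEnd(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
*/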

236: /*
 237:        The following two routines are used for both VecScatterBegin_MPI_ToAll() and VecScatterBegin_MPI_ToOne()
238: */
239: int VecScatterDestroy_MPI_ToAll(VecScatter ctx)
240: {
241:   VecScatter_MPI_ToAll *scat = (VecScatter_MPI_ToAll*)ctx->todata;

245:   PetscFree(scat->count);
246:   if (scat->work1) {PetscFree(scat->work1);}
247:   if (scat->work2) {PetscFree(scat->work2);}
248:   PetscFree(ctx->todata);
249:   PetscLogObjectDestroy(ctx);
250:   PetscHeaderDestroy(ctx);
251:   return(0);
252: }

254: int VecScatterCopy_MPI_ToAll(VecScatter in,VecScatter out)
255: {
256:   VecScatter_MPI_ToAll *in_to = (VecScatter_MPI_ToAll*)in->todata,*sto;
257:   int                  size,i,ierr;

260:   out->postrecvs      = 0;
261:   out->begin          = in->begin;
262:   out->end            = in->end;
263:   out->copy           = in->copy;
264:   out->destroy        = in->destroy;
265:   out->view           = in->view;

267:   ierr      = PetscNew(VecScatter_MPI_ToAll,&sto);
268:   sto->type = in_to->type;

270:   MPI_Comm_size(out->comm,&size);
271:   PetscMalloc(size*sizeof(int),&sto->count);
272:   for (i=0; i<size; i++) {
273:     sto->count[i] = in_to->count[i];
274:   }
275:   sto->work1         = 0;
276:   sto->work2         = 0;
277:   PetscLogObjectMemory(out,sizeof(VecScatter_MPI_ToAll)+size*sizeof(int));
278:   out->todata        = (void*)sto;
279:   out->fromdata      = (void*)0;
280:   return(0);
281: }

283: /* --------------------------------------------------------------------------------------*/
284: /* 
285:         Scatter: sequential general to sequential general 
286: */
287: int VecScatterBegin_SGtoSG(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
288: {
289:   VecScatter_Seq_General *gen_to = (VecScatter_Seq_General*)ctx->todata;
290:   VecScatter_Seq_General *gen_from = (VecScatter_Seq_General*)ctx->fromdata;
291:   int                    i,n = gen_from->n,*fslots,*tslots,ierr;
292:   Scalar                 *xv,*yv;
293: 
295:   VecGetArray(x,&xv);
296:   if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}
297:   if (mode & SCATTER_REVERSE){
298:     gen_to   = (VecScatter_Seq_General*)ctx->fromdata;
299:     gen_from = (VecScatter_Seq_General*)ctx->todata;
300:   }
301:   fslots = gen_from->slots;
302:   tslots = gen_to->slots;

304:   if (addv == INSERT_VALUES) {
305:     for (i=0; i<n; i++) {yv[tslots[i]] = xv[fslots[i]];}
306:   } else if (addv == ADD_VALUES) {
307:     for (i=0; i<n; i++) {yv[tslots[i]] += xv[fslots[i]];}
308: #if !defined(PETSC_USE_COMPLEX)
309:   } else  if (addv == MAX_VALUES) {
310:     for (i=0; i<n; i++) {yv[tslots[i]] = PetscMax(yv[tslots[i]],xv[fslots[i]]);}
311: #endif
312:   } else {SETERRQ(1,"Wrong insert option");}
313:   VecRestoreArray(x,&xv);
314:   if (x != y) {VecRestoreArray(y,&yv);}
315:   return(0);
316: }
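/*
   Editor's note (illustrative sketch, not part of the original source): the loops
   above implement, for the forward INSERT_VALUES case, y[iy[i]] = x[ix[i]] for
   i = 0,...,n-1, where ix and iy are the general index sets given to
   VecScatterCreate(). A small sequential example, assuming the usual PETSc 2.x
   calling sequences:

      int        ixidx[2] = {3,0},iyidx[2] = {1,2},ierr;
      Vec        x,y;                          both created with VecCreateSeq()
      IS         ix,iy;
      VecScatter scat;

      ierr = ISCreateGeneral(PETSC_COMM_SELF,2,ixidx,&ix);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,2,iyidx,&iy);CHKERRQ(ierr);
      ierr = VecScatterCreate(x,ix,y,iy,&scat);CHKERRQ(ierr);
      ierr = VecScatterBegin(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
      ierr = VecScatterEnd(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);

   After the scatter y[1] holds x[3] and y[2] holds x[0]; the remaining entries of
   y are untouched. With ADD_VALUES the values are accumulated instead of copied.
*/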

318: /* 
319:     Scatter: sequential general to sequential stride 1 
320: */
321: int VecScatterBegin_SGtoSS_Stride1(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
322: {
323:   VecScatter_Seq_Stride  *gen_to   = (VecScatter_Seq_Stride*)ctx->todata;
324:   VecScatter_Seq_General *gen_from = (VecScatter_Seq_General*)ctx->fromdata;
325:   int                    i,n = gen_from->n,*fslots = gen_from->slots;
326:   int                    first = gen_to->first,ierr;
327:   Scalar                 *xv,*yv;
328: 
330:   VecGetArray(x,&xv);
331:   if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}
332:   if (mode & SCATTER_REVERSE){
333:     xv += first;
334:     if (addv == INSERT_VALUES) {
335:       for (i=0; i<n; i++) {yv[fslots[i]] = xv[i];}
336:     } else  if (addv == ADD_VALUES) {
337:       for (i=0; i<n; i++) {yv[fslots[i]] += xv[i];}
338: #if !defined(PETSC_USE_COMPLEX)
339:     } else  if (addv == MAX_VALUES) {
340:       for (i=0; i<n; i++) {yv[fslots[i]] = PetscMax(yv[fslots[i]],xv[i]);}
341: #endif
342:     } else {SETERRQ(1,"Wrong insert option");}
343:   } else {
344:     yv += first;
345:     if (addv == INSERT_VALUES) {
346:       for (i=0; i<n; i++) {yv[i] = xv[fslots[i]];}
347:     } else  if (addv == ADD_VALUES) {
348:       for (i=0; i<n; i++) {yv[i] += xv[fslots[i]];}
349: #if !defined(PETSC_USE_COMPLEX)
350:     } else if (addv == MAX_VALUES) {
351:       for (i=0; i<n; i++) {yv[i] = PetscMax(yv[i],xv[fslots[i]]);}
352: #endif
353:     } else {SETERRQ(1,"Wrong insert option");}
354:   }
355:   VecRestoreArray(x,&xv);
356:   if (x != y) {VecRestoreArray(y,&yv);}
357:   return(0);
358: }

360: /* 
361:    Scatter: sequential general to sequential stride 
362: */
363: int VecScatterBegin_SGtoSS(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
364: {
365:   VecScatter_Seq_Stride  *gen_to   = (VecScatter_Seq_Stride*)ctx->todata;
366:   VecScatter_Seq_General *gen_from = (VecScatter_Seq_General*)ctx->fromdata;
367:   int                    i,n = gen_from->n,*fslots = gen_from->slots;
368:   int                    first = gen_to->first,step = gen_to->step,ierr;
369:   Scalar                 *xv,*yv;
370: 
372:   VecGetArray(x,&xv);
373:   if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}

375:   if (mode & SCATTER_REVERSE){
376:     if (addv == INSERT_VALUES) {
377:       for (i=0; i<n; i++) {yv[fslots[i]] = xv[first + i*step];}
378:     } else if (addv == ADD_VALUES) {
379:       for (i=0; i<n; i++) {yv[fslots[i]] += xv[first + i*step];}
380: #if !defined(PETSC_USE_COMPLEX)
381:     } else if (addv == MAX_VALUES) {
382:       for (i=0; i<n; i++) {yv[fslots[i]] = PetscMax(yv[fslots[i]],xv[first + i*step]);}
383: #endif
384:     } else {SETERRQ(1,"Wrong insert option");}
385:   } else {
386:     if (addv == INSERT_VALUES) {
387:       for (i=0; i<n; i++) {yv[first + i*step] = xv[fslots[i]];}
388:     } else if (addv == ADD_VALUES) {
389:       for (i=0; i<n; i++) {yv[first + i*step] += xv[fslots[i]];}
390: #if !defined(PETSC_USE_COMPLEX)
391:     } else if (addv == MAX_VALUES) {
392:       for (i=0; i<n; i++) {yv[first + i*step] = PetscMax(yv[first + i*step],xv[fslots[i]]);}
393: #endif
394:     } else {SETERRQ(1,"Wrong insert option");}
395:   }
396:   VecRestoreArray(x,&xv);
397:   if (x != y) {VecRestoreArray(y,&yv);}
398:   return(0);
399: }

401: /* 
402:     Scatter: sequential stride 1 to sequential general 
403: */
404: int VecScatterBegin_SStoSG_Stride1(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
405: {
406:   VecScatter_Seq_Stride  *gen_from = (VecScatter_Seq_Stride*)ctx->fromdata;
407:   VecScatter_Seq_General *gen_to   = (VecScatter_Seq_General*)ctx->todata;
408:   int                    i,n = gen_from->n,*fslots = gen_to->slots;
409:   int                    first = gen_from->first,ierr;
410:   Scalar                 *xv,*yv;
411: 
413:   VecGetArray(x,&xv);
414:   if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}

416:   if (mode & SCATTER_REVERSE){
417:     yv += first;
418:     if (addv == INSERT_VALUES) {
419:       for (i=0; i<n; i++) {yv[i] = xv[fslots[i]];}
420:     } else  if (addv == ADD_VALUES) {
421:       for (i=0; i<n; i++) {yv[i] += xv[fslots[i]];}
422: #if !defined(PETSC_USE_COMPLEX)
423:     } else  if (addv == MAX_VALUES) {
424:       for (i=0; i<n; i++) {yv[i] = PetscMax(yv[i],xv[fslots[i]]);}
425: #endif
426:     } else {SETERRQ(1,"Wrong insert option");}
427:   } else {
428:     xv += first;
429:     if (addv == INSERT_VALUES) {
430:       for (i=0; i<n; i++) {yv[fslots[i]] = xv[i];}
431:     } else  if (addv == ADD_VALUES) {
432:       for (i=0; i<n; i++) {yv[fslots[i]] += xv[i];}
433: #if !defined(PETSC_USE_COMPLEX)
434:     } else  if (addv == MAX_VALUES) {
435:       for (i=0; i<n; i++) {yv[fslots[i]] = PetscMax(yv[fslots[i]],xv[i]);}
436: #endif
437:     } else {SETERRQ(1,"Wrong insert option");}
438:   }
439:   VecRestoreArray(x,&xv);
440:   if (x != y) {VecRestoreArray(y,&yv);}
441:   return(0);
442: }

444: /* 
445:    Scatter: sequential stride to sequential general 
446: */
447: int VecScatterBegin_SStoSG(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
448: {
449:   VecScatter_Seq_Stride  *gen_from = (VecScatter_Seq_Stride*)ctx->fromdata;
450:   VecScatter_Seq_General *gen_to   = (VecScatter_Seq_General*)ctx->todata;
451:   int                    i,n = gen_from->n,*fslots = gen_to->slots;
452:   int                    first = gen_from->first,step = gen_from->step,ierr;
453:   Scalar                 *xv,*yv;
454: 
456:   VecGetArray(x,&xv);
457:   if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}

459:   if (mode & SCATTER_REVERSE){
460:     if (addv == INSERT_VALUES) {
461:       for (i=0; i<n; i++) {yv[first + i*step] = xv[fslots[i]];}
462:     } else  if (addv == ADD_VALUES) {
463:       for (i=0; i<n; i++) {yv[first + i*step] += xv[fslots[i]];}
464: #if !defined(PETSC_USE_COMPLEX)
465:     } else  if (addv == MAX_VALUES) {
466:       for (i=0; i<n; i++) {yv[first + i*step] = PetscMax(yv[first + i*step],xv[fslots[i]]);}
467: #endif
468:     } else {SETERRQ(1,"Wrong insert option");}
469:   } else {
470:     if (addv == INSERT_VALUES) {
471:       for (i=0; i<n; i++) {yv[fslots[i]] = xv[first + i*step];}
472:     } else  if (addv == ADD_VALUES) {
473:       for (i=0; i<n; i++) {yv[fslots[i]] += xv[first + i*step];}
474: #if !defined(PETSC_USE_COMPLEX)
475:     } else  if (addv == MAX_VALUES) {
476:       for (i=0; i<n; i++) {yv[fslots[i]] = PetscMax(yv[fslots[i]],xv[first + i*step]);}
477: #endif
478:     } else {SETERRQ(1,"Wrong insert option");}
479:   }
480:   VecRestoreArray(x,&xv);
481:   if (x != y) {VecRestoreArray(y,&yv);}
482:   return(0);
483: }

485: /* 
486:      Scatter: sequential stride to sequential stride 
487: */
488: int VecScatterBegin_SStoSS(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
489: {
490:   VecScatter_Seq_Stride *gen_to   = (VecScatter_Seq_Stride*)ctx->todata;
491:   VecScatter_Seq_Stride *gen_from = (VecScatter_Seq_Stride*)ctx->fromdata;
492:   int                   i,n = gen_from->n,to_first = gen_to->first,to_step = gen_to->step;
493:   int                   from_first = gen_from->first,from_step = gen_from->step,ierr;
494:   Scalar                *xv,*yv;
495: 
497:   VecGetArray(x,&xv);
498:   if (x != y) {VecGetArray(y,&yv);} else {yv = xv;}

500:   if (mode & SCATTER_REVERSE){
501:     from_first = gen_to->first;
502:     to_first   = gen_from->first;
503:     from_step  = gen_to->step;
504:     to_step    = gen_from->step;
505:   }

507:   if (addv == INSERT_VALUES) {
508:     if (to_step == 1 && from_step == 1) {
509:       PetscMemcpy(yv+to_first,xv+from_first,n*sizeof(Scalar));
510:     } else  {
511:       for (i=0; i<n; i++) {
512:         yv[to_first + i*to_step] = xv[from_first+i*from_step];
513:       }
514:     }
515:   } else if (addv == ADD_VALUES) {
516:     if (to_step == 1 && from_step == 1) {
517:       yv += to_first; xv += from_first;
518:       for (i=0; i<n; i++) {
519:         yv[i] += xv[i];
520:       }
521:     } else {
522:       for (i=0; i<n; i++) {
523:         yv[to_first + i*to_step] += xv[from_first+i*from_step];
524:       }
525:     }
526: #if !defined(PETSC_USE_COMPLEX)
527:   } else if (addv == MAX_VALUES) {
528:     if (to_step == 1 && from_step == 1) {
529:       yv += to_first; xv += from_first;
530:       for (i=0; i<n; i++) {
531:         yv[i] = PetscMax(yv[i],xv[i]);
532:       }
533:     } else {
534:       for (i=0; i<n; i++) {
535:         yv[to_first + i*to_step] = PetscMax(yv[to_first + i*to_step],xv[from_first+i*from_step]);
536:       }
537:     }
538: #endif
539:   } else {SETERRQ(1,"Wrong insert option");}
540:   VecRestoreArray(x,&xv);
541:   if (x != y) {VecRestoreArray(y,&yv);}
542:   return(0);
543: }
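/*
   Editor's note (illustrative sketch, not part of the original source): the
   stride-to-stride case above copies x[from_first + i*from_step] into
   y[to_first + i*to_step] for i = 0,...,n-1 (or adds / takes the max, depending
   on addv); when both steps are 1 the INSERT_VALUES branch reduces to the single
   PetscMemcpy() seen above. For instance, a scatter gathering the even-indexed
   entries of x into the leading entries of y might be created (assuming the
   usual calling sequences) with:

      ierr = ISCreateStride(PETSC_COMM_SELF,n,0,2,&ix);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,n,0,1,&iy);CHKERRQ(ierr);
      ierr = VecScatterCreate(x,ix,y,iy,&scat);CHKERRQ(ierr);
*/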

545: /* --------------------------------------------------------------------------*/


548: int VecScatterCopy_SGToSG(VecScatter in,VecScatter out)
549: {
550:   int                    ierr;
551:   VecScatter_Seq_General *in_to   = (VecScatter_Seq_General*)in->todata,*out_to;
552:   VecScatter_Seq_General *in_from = (VecScatter_Seq_General*)in->fromdata,*out_from;
553: 
555:   out->postrecvs     = 0;
556:   out->begin         = in->begin;
557:   out->end           = in->end;
558:   out->copy          = in->copy;
559:   out->destroy       = in->destroy;
560:   out->view          = in->view;

562:   ierr                           = PetscMalloc(in_to->n*sizeof(int)+sizeof(VecScatter_Seq_General),&out_to);
563:   out_to->n                      = in_to->n;
564:   out_to->type                   = in_to->type;
565:   out_to->nonmatching_computed   = PETSC_FALSE;
566:   out_to->slots_nonmatching      = 0;
567:   out_to->is_copy                = PETSC_FALSE;
568:   out_to->slots                  = (int*)(out_to + 1);
569:   PetscMemcpy(out_to->slots,in_to->slots,(out_to->n)*sizeof(int));

571:   ierr                           = PetscMalloc(in_from->n*sizeof(int)+sizeof(VecScatter_Seq_General),&out_from);
572:   out_from->n                    = in_from->n;
573:   out_from->type                 = in_from->type;
574:   out_from->nonmatching_computed = PETSC_FALSE;
575:   out_from->slots_nonmatching    = 0;
576:   out_from->is_copy              = PETSC_FALSE;
577:   out_from->slots                = (int*)(out_from + 1);
578:   PetscMemcpy(out_from->slots,in_from->slots,(out_from->n)*sizeof(int));

580:   PetscLogObjectMemory(out,2*sizeof(VecScatter_Seq_General)+(out_from->n+out_to->n)*sizeof(int));
581:   out->todata     = (void*)out_to;
582:   out->fromdata   = (void*)out_from;
583:   return(0);
584: }

586: int VecScatterDestroy_SGtoSG(VecScatter ctx)
587: {

591:   PetscFree(ctx->todata);
592:   PetscFree(ctx->fromdata);
593:   PetscLogObjectDestroy(ctx);
594:   PetscHeaderDestroy(ctx);
595:   return(0);
596: }

598: int VecScatterCopy_SGToStride(VecScatter in,VecScatter out)
599: {
601:   VecScatter_Seq_Stride  *in_to   = (VecScatter_Seq_Stride*)in->todata,*out_to;
602:   VecScatter_Seq_General *in_from = (VecScatter_Seq_General*)in->fromdata,*out_from;
603: 
605:   out->postrecvs     = 0;
606:   out->begin         = in->begin;
607:   out->end           = in->end;
608:   out->copy          = in->copy;
609:   out->destroy       = in->destroy;
610:   out->view          = in->view;

612:   ierr            = PetscNew(VecScatter_Seq_Stride,&out_to);
613:   out_to->n       = in_to->n;
614:   out_to->type    = in_to->type;
615:   out_to->first   = in_to->first;
616:   out_to->step    = in_to->step;
617:   out_to->type    = in_to->type;

619:   ierr                           = PetscMalloc(in_from->n*sizeof(int)+sizeof(VecScatter_Seq_General),&out_from);
620:   out_from->n                    = in_from->n;
621:   out_from->type                 = in_from->type;
622:   out_from->nonmatching_computed = PETSC_FALSE;
623:   out_from->slots_nonmatching    = 0;
624:   out_from->is_copy              = PETSC_FALSE;
625:   out_from->slots                = (int*)(out_from + 1);
626:   PetscMemcpy(out_from->slots,in_from->slots,(out_from->n)*sizeof(int));

628:   PetscLogObjectMemory(out,sizeof(VecScatter_Seq_General)+sizeof(VecScatter_Seq_Stride)+in_from->n*sizeof(int));
629:   out->todata     = (void*)out_to;
630:   out->fromdata   = (void*)out_from;
631:   return(0);
632: }

634: /* --------------------------------------------------------------------------*/
635: /* 
636:     Scatter: parallel to sequential vector, sequential strides for both. 
637: */
638: int VecScatterCopy_PStoSS(VecScatter in,VecScatter out)
639: {
640:   VecScatter_Seq_Stride *in_to   = (VecScatter_Seq_Stride*)in->todata,*out_to;
641:   VecScatter_Seq_Stride *in_from = (VecScatter_Seq_Stride*)in->fromdata,*out_from;
642:   int                   ierr;

645:   out->postrecvs  = 0;
646:   out->begin      = in->begin;
647:   out->end        = in->end;
648:   out->copy       = in->copy;
649:   out->destroy    = in->destroy;
650:   out->view       = in->view;

652:   ierr            = PetscNew(VecScatter_Seq_Stride,&out_to);
653:   out_to->n       = in_to->n;
654:   out_to->type    = in_to->type;
655:   out_to->first   = in_to->first;
656:   out_to->step    = in_to->step;
657:   out_to->type    = in_to->type;
658:   ierr            = PetscNew(VecScatter_Seq_Stride,&out_from);
659:   PetscLogObjectMemory(out,2*sizeof(VecScatter_Seq_Stride));
660:   out_from->n     = in_from->n;
661:   out_from->type  = in_from->type;
662:   out_from->first = in_from->first;
663:   out_from->step  = in_from->step;
664:   out_from->type  = in_from->type;
665:   out->todata     = (void*)out_to;
666:   out->fromdata   = (void*)out_from;
667:   return(0);
668: }

670: EXTERN int VecScatterCreate_PtoS(int,int *,int,int *,Vec,Vec,int,VecScatter);
671: EXTERN int VecScatterCreate_PtoP(int,int *,int,int *,Vec,Vec,VecScatter);
672: EXTERN int VecScatterCreate_StoP(int,int *,int,int *,Vec,VecScatter);

674: /* =======================================================================*/
675: #define VECSEQ 0
676: #define VECMPI 1

678: /*@C
679:    VecScatterCreate - Creates a vector scatter context.

681:    Collective on Vec

683:    Input Parameters:
684: +  xin - a vector that defines the shape (parallel data layout of the vector)
685:          of vectors from which we scatter
686: .  yin - a vector that defines the shape (parallel data layout of the vector)
687:          of vectors to which we scatter
688: .  ix - the indices of xin to scatter
689: -  iy - the indices of yin to hold results

691:    Output Parameter:
692: .  newctx - location to store the new scatter context

694:    Options Database:
695: +  -vecscatter_merge     - Merges scatter send and receive (may offer better performance with some MPIs)
696: .  -vecscatter_ssend     - Uses MPI_Ssend_init() instead of MPI_Send_init() (may offer better performance with some MPIs)
697: .  -vecscatter_sendfirst - Posts sends before receives (may offer better performance with some MPIs)
698: .  -vecscatter_rr        - use ready receiver mode for MPI sends in scatters (rarely used)
699: -  -vecscatter_packtogether - Pack all messages before sending, receive all messages before unpacking
700:     Level: intermediate

702:   Notes:
 703:    In calls to VecScatterBegin() and VecScatterEnd() you can use different vectors than the xin and 
704:    yin you used above; BUT they must have the same parallel data layout, for example,
705:    they could be obtained from VecDuplicate().
706:    A VecScatter context CANNOT be used in two or more simultaneous scatters;
 707:    that is, you cannot call a second VecScatterBegin() with the same scatter
708:    context until the VecScatterEnd() has been called on the first VecScatterBegin().
709:    In this case a separate VecScatter is needed for each concurrent scatter.

711:    Concepts: scatter^between vectors
712:    Concepts: gather^between vectors

714: .seealso: VecScatterDestroy()
715: @*/
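/*
   Editor's note (illustrative sketch, not part of the original source): a typical
   use of VecScatterCreate() is to pull a few selected entries of a parallel
   vector into a small sequential work vector on each process. The global indices
   below (0, 5 and 9) are arbitrary illustrative values and assume xin has at
   least 10 global entries:

      Vec        xin,yin;                       xin parallel, yin sequential
      IS         ix,iy;
      VecScatter scat;
      int        idx[3] = {0,5,9},ierr;         global indices of xin

      ierr = VecCreateSeq(PETSC_COMM_SELF,3,&yin);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,3,idx,&ix);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,3,0,1,&iy);CHKERRQ(ierr);
      ierr = VecScatterCreate(xin,ix,yin,iy,&scat);CHKERRQ(ierr);
      ierr = VecScatterBegin(xin,yin,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
      ierr = VecScatterEnd(xin,yin,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
      ierr = VecScatterDestroy(scat);CHKERRQ(ierr);
*/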
716: int VecScatterCreate(Vec xin,IS ix,Vec yin,IS iy,VecScatter *newctx)
717: {
718:   VecScatter ctx;
719:   int        len,size,cando,totalv,ierr,*range,xin_type = VECSEQ,yin_type = VECSEQ;
720:   PetscTruth flag;
721:   MPI_Comm   comm,ycomm;
722:   PetscTruth ixblock,iyblock,iystride,islocal;
723:   IS         tix = 0,tiy = 0;


727:   /*
 728:       Determine if the vectors are "parallel", i.e. they share a communicator with other processors,
 729:       or sequential (they do not share a communicator). The difference is that parallel vectors treat
 730:       the index set as providing indices in the global parallel numbering of the vector, while
 731:       sequential vectors treat the index set as providing indices in the local sequential
 732:       numbering.
733:   */
734:   PetscObjectGetComm((PetscObject)xin,&comm);
735:   MPI_Comm_size(comm,&size);
736:   if (size > 1) {xin_type = VECMPI;}

738:   PetscObjectGetComm((PetscObject)yin,&ycomm);
739:   MPI_Comm_size(ycomm,&size);
740:   if (size > 1) {comm = ycomm; yin_type = VECMPI;}
741: 
742:   /* generate the Scatter context */
743:   PetscHeaderCreate(ctx,_p_VecScatter,int,VEC_SCATTER_COOKIE,0,"VecScatter",comm,VecScatterDestroy,VecScatterView);
744:   PetscLogObjectCreate(ctx);
745:   PetscLogObjectMemory(ctx,sizeof(struct _p_VecScatter));
746:   ctx->inuse               = PETSC_FALSE;

748:   ctx->beginandendtogether = PETSC_FALSE;
749:   PetscOptionsHasName(PETSC_NULL,"-vecscatter_merge",&ctx->beginandendtogether);
750:   if (ctx->beginandendtogether) {
 751:     PetscLogInfo(ctx,"VecScatterCreate:Using combined (merged) vector scatter begin and end\n");
752:   }
753:   PetscOptionsHasName(PETSC_NULL,"-vecscatter_packtogether",&ctx->packtogether);
754:   if (ctx->packtogether) {
 755:     PetscLogInfo(ctx,"VecScatterCreate:Pack all messages before sending\n");
756:   }

758:   VecGetLocalSize(xin,&ctx->from_n);
759:   VecGetLocalSize(yin,&ctx->to_n);

761:   /*
 762:       if ix or iy is not provided, assume we are scattering the entire vector
763:   */
764:   if (!ix && xin_type == VECSEQ) {
765:     ISCreateStride(comm,ctx->from_n,0,1,&ix);
766:     tix  = ix;
767:   } else if (!ix && xin_type == VECMPI) {
768:     int bign;
769:     VecGetSize(xin,&bign);
770:     ISCreateStride(comm,bign,0,1,&ix);
771:     tix  = ix;
772:   } else if (!ix) {
 773:     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"ix not given, but not Seq or MPI vector");
774:   }

776:   if (!iy && yin_type == VECSEQ) {
777:     ISCreateStride(comm,ctx->to_n,0,1,&iy);
778:     tiy  = iy;
779:   } else if (!iy && yin_type == VECMPI) {
780:     int bign;
781:     VecGetSize(yin,&bign);
782:     ISCreateStride(comm,bign,0,1,&iy);
783:     tiy  = iy;
784:   } else if (!iy) {
785:     SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"iy not given, but not Seq or MPI vector");
786:   }

788:   /* ===========================================================================================================
789:         Check for special cases
790:      ==========================================================================================================*/
791:   /* ---------------------------------------------------------------------------*/
792:   if (xin_type == VECSEQ && yin_type == VECSEQ) {
793:     if (ix->type == IS_GENERAL && iy->type == IS_GENERAL){
794:       int                    nx,ny,*idx,*idy;
795:       VecScatter_Seq_General *to,*from;

797:       ISGetLocalSize(ix,&nx);
798:       ISGetLocalSize(iy,&ny);
799:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
800:       ISGetIndices(ix,&idx);
801:       ISGetIndices(iy,&idy);
802:       len  = sizeof(VecScatter_Seq_General) + nx*sizeof(int);
803:       PetscMalloc(len,&to);
804:       PetscLogObjectMemory(ctx,2*len);
805:       to->slots         = (int*)(to + 1);
806:       to->n             = nx;
807:       VecScatterCheckIndices_Private(ctx->to_n,ny,idy);
808:       PetscMemcpy(to->slots,idy,nx*sizeof(int));
809:       PetscMalloc(len,&from);
810:       from->slots       = (int*)(from + 1);
811:       from->n           = nx;
812:       VecScatterCheckIndices_Private(ctx->from_n,nx,idx);
 813:       PetscMemcpy(from->slots,idx,nx*sizeof(int));
814:       to->type          = VEC_SCATTER_SEQ_GENERAL;
815:       from->type        = VEC_SCATTER_SEQ_GENERAL;
816:       ctx->todata       = (void*)to;
817:       ctx->fromdata     = (void*)from;
818:       ctx->postrecvs    = 0;
819:       ctx->begin        = VecScatterBegin_SGtoSG;
820:       ctx->end          = 0;
821:       ctx->destroy      = VecScatterDestroy_SGtoSG;
822:       ctx->copy         = VecScatterCopy_SGToSG;
 823:       PetscLogInfo(xin,"VecScatterCreate:Special case: sequential vector general scatter\n");
824:       goto functionend;
825:     } else if (ix->type == IS_STRIDE &&  iy->type == IS_STRIDE){
826:       int                    nx,ny,to_first,to_step,from_first,from_step;
827:       VecScatter_Seq_Stride  *from8,*to8;

829:       ISGetLocalSize(ix,&nx);
830:       ISGetLocalSize(iy,&ny);
831:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
832:       ISStrideGetInfo(iy,&to_first,&to_step);
833:       ISStrideGetInfo(ix,&from_first,&from_step);
834:       ierr               = PetscNew(VecScatter_Seq_Stride,&to8);
835:       to8->n             = nx;
836:       to8->first         = to_first;
837:       to8->step          = to_step;
838:       ierr               = PetscNew(VecScatter_Seq_Stride,&from8);
839:       PetscLogObjectMemory(ctx,2*sizeof(VecScatter_Seq_Stride));
840:       from8->n           = nx;
841:       from8->first       = from_first;
842:       from8->step        = from_step;
843:       to8->type          = VEC_SCATTER_SEQ_STRIDE;
844:       from8->type        = VEC_SCATTER_SEQ_STRIDE;
845:       ctx->todata       = (void*)to8;
846:       ctx->fromdata     = (void*)from8;
847:       ctx->postrecvs    = 0;
848:       ctx->begin        = VecScatterBegin_SStoSS;
849:       ctx->end          = 0;
850:       ctx->destroy      = VecScatterDestroy_SGtoSG;
851:       ctx->copy         = 0;
 852:       PetscLogInfo(xin,"VecScatterCreate:Special case: sequential vector stride to stride\n");
853:       goto functionend;
854:     } else if (ix->type == IS_GENERAL && iy->type == IS_STRIDE){
855:       int                    nx,ny,*idx,first,step;
856:       VecScatter_Seq_General *from9;
857:       VecScatter_Seq_Stride  *to9;

859:       ISGetLocalSize(ix,&nx);
860:       ISGetIndices(ix,&idx);
861:       ISGetLocalSize(iy,&ny);
862:       ISStrideGetInfo(iy,&first,&step);
863:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
864:       ierr           = PetscNew(VecScatter_Seq_Stride,&to9);
865:       to9->n         = nx;
866:       to9->first     = first;
867:       to9->step      = step;
868:       len            = sizeof(VecScatter_Seq_General) + nx*sizeof(int);
869:       PetscMalloc(len,&from9);
870:       PetscLogObjectMemory(ctx,len + sizeof(VecScatter_Seq_Stride));
871:       from9->slots   = (int*)(from9 + 1);
872:       from9->n       = nx;
873:       ierr           = VecScatterCheckIndices_Private(ctx->from_n,nx,idx);
874:       ierr           = PetscMemcpy(from9->slots,idx,nx*sizeof(int));
875:       ctx->todata    = (void*)to9; ctx->fromdata = (void*)from9;
876:       ctx->postrecvs = 0;
877:       if (step == 1)  ctx->begin = VecScatterBegin_SGtoSS_Stride1;
878:       else            ctx->begin = VecScatterBegin_SGtoSS;
879:       ctx->destroy = VecScatterDestroy_SGtoSG;
880:       ctx->end     = 0;
881:       ctx->copy    = VecScatterCopy_SGToStride;
882:       to9->type    = VEC_SCATTER_SEQ_STRIDE;
883:       from9->type  = VEC_SCATTER_SEQ_GENERAL;
 884:       PetscLogInfo(xin,"VecScatterCreate:Special case: sequential vector general to stride\n");
885:       goto functionend;
886:     } else if (ix->type == IS_STRIDE && iy->type == IS_GENERAL){
887:       int                    nx,ny,*idy,first,step;
888:       VecScatter_Seq_General *to10;
889:       VecScatter_Seq_Stride  *from10;

891:       ISGetLocalSize(ix,&nx);
892:       ISGetIndices(iy,&idy);
893:       ISGetLocalSize(iy,&ny);
894:       ISStrideGetInfo(ix,&first,&step);
895:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
896:       ierr              = PetscNew(VecScatter_Seq_Stride,&from10);
897:       from10->n         = nx;
898:       from10->first     = first;
899:       from10->step      = step;
900:       len               = sizeof(VecScatter_Seq_General) + nx*sizeof(int);
901:       PetscMalloc(len,&to10);
902:       PetscLogObjectMemory(ctx,len + sizeof(VecScatter_Seq_Stride));
903:       to10->slots       = (int*)(to10 + 1);
904:       to10->n           = nx;
905:       VecScatterCheckIndices_Private(ctx->to_n,ny,idy);
906:       PetscMemcpy(to10->slots,idy,nx*sizeof(int));
907:       ctx->todata     = (void*)to10;
908:       ctx->fromdata   = (void*)from10;
909:       ctx->postrecvs  = 0;
910:       if (step == 1) ctx->begin = VecScatterBegin_SStoSG_Stride1;
911:       else           ctx->begin = VecScatterBegin_SStoSG;
912:       ctx->destroy    = VecScatterDestroy_SGtoSG;
913:       ctx->end        = 0;
914:       ctx->copy       = 0;
915:       to10->type      = VEC_SCATTER_SEQ_GENERAL;
916:       from10->type    = VEC_SCATTER_SEQ_STRIDE;
 917:       PetscLogInfo(xin,"VecScatterCreate:Special case: sequential vector stride to general\n");
918:       goto functionend;
919:     } else {
920:       int                    nx,ny,*idx,*idy;
921:       VecScatter_Seq_General *to11,*from11;
922:       PetscTruth             idnx,idny;

924:       ISGetLocalSize(ix,&nx);
925:       ISGetLocalSize(iy,&ny);
926:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");

928:       ISIdentity(ix,&idnx);
929:       ISIdentity(iy,&idny);
930:       if (idnx && idny) {
931:         VecScatter_Seq_Stride *to13,*from13;
932:         ierr              = PetscNew(VecScatter_Seq_Stride,&to13);
933:         to13->n           = nx;
934:         to13->first       = 0;
935:         to13->step        = 1;
936:         ierr              = PetscNew(VecScatter_Seq_Stride,&from13);
937:         PetscLogObjectMemory(ctx,2*sizeof(VecScatter_Seq_Stride));
938:         from13->n         = nx;
939:         from13->first     = 0;
940:         from13->step      = 1;
941:         to13->type        = VEC_SCATTER_SEQ_STRIDE;
942:         from13->type      = VEC_SCATTER_SEQ_STRIDE;
943:         ctx->todata       = (void*)to13;
944:         ctx->fromdata     = (void*)from13;
945:         ctx->postrecvs    = 0;
946:         ctx->begin        = VecScatterBegin_SStoSS;
947:         ctx->end          = 0;
948:         ctx->destroy      = VecScatterDestroy_SGtoSG;
949:         ctx->copy         = 0;
 950:         PetscLogInfo(xin,"VecScatterCreate:Special case: sequential copy\n");
951:         goto functionend;
952:       }

954:       ISGetIndices(iy,&idy);
955:       ISGetIndices(ix,&idx);
956:       len               = sizeof(VecScatter_Seq_General) + nx*sizeof(int);
957:       PetscMalloc(len,&to11);
958:       PetscLogObjectMemory(ctx,2*len);
959:       to11->slots       = (int*)(to11 + 1);
960:       to11->n           = nx;
961:       VecScatterCheckIndices_Private(ctx->to_n,ny,idy);
 962:       PetscMemcpy(to11->slots,idy,nx*sizeof(int));
963:       PetscMalloc(len,&from11);
964:       from11->slots     = (int*)(from11 + 1);
965:       from11->n         = nx;
966:       VecScatterCheckIndices_Private(ctx->from_n,nx,idx);
967:       PetscMemcpy(from11->slots,idx,nx*sizeof(int));
968:       to11->type        = VEC_SCATTER_SEQ_GENERAL;
969:       from11->type      = VEC_SCATTER_SEQ_GENERAL;
970:       ctx->todata       = (void*)to11;
971:       ctx->fromdata     = (void*)from11;
972:       ctx->postrecvs    = 0;
973:       ctx->begin        = VecScatterBegin_SGtoSG;
974:       ctx->end          = 0;
975:       ctx->destroy      = VecScatterDestroy_SGtoSG;
976:       ctx->copy         = VecScatterCopy_SGToSG;
977:       ISRestoreIndices(ix,&idx);
978:       ISRestoreIndices(iy,&idy);
 979:       PetscLogInfo(xin,"VecScatterCreate:Sequential vector scatter with block indices\n");
980:       goto functionend;
981:     }
982:   }
983:   /* ---------------------------------------------------------------------------*/
984:   if (xin_type == VECMPI && yin_type == VECSEQ) {

986:   /* ===========================================================================================================
987:         Check for special cases
988:      ==========================================================================================================*/
989:     islocal = PETSC_FALSE;
990:     /* special case extracting (subset of) local portion */
991:     if (ix->type == IS_STRIDE && iy->type == IS_STRIDE){
992:       int                   nx,ny,to_first,to_step,from_first,from_step;
993:       int                   start,end;
994:       VecScatter_Seq_Stride *from12,*to12;

996:       VecGetOwnershipRange(xin,&start,&end);
997:       ISGetLocalSize(ix,&nx);
998:       ISStrideGetInfo(ix,&from_first,&from_step);
999:       ISGetLocalSize(iy,&ny);
1000:       ISStrideGetInfo(iy,&to_first,&to_step);
1001:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1002:       if (ix->min >= start && ix->max < end) islocal = PETSC_TRUE; else islocal = PETSC_FALSE;
1003:       MPI_Allreduce(&islocal,&cando,1,MPI_INT,MPI_LAND,xin->comm);
1004:       if (cando) {
1005:         ierr                = PetscNew(VecScatter_Seq_Stride,&to12);
1006:         to12->n             = nx;
1007:         to12->first         = to_first;
1008:         to12->step          = to_step;
1009:         ierr                = PetscNew(VecScatter_Seq_Stride,&from12);
1010:         PetscLogObjectMemory(ctx,2*sizeof(VecScatter_Seq_Stride));
1011:         from12->n           = nx;
1012:         from12->first       = from_first-start;
1013:         from12->step        = from_step;
1014:         to12->type          = VEC_SCATTER_SEQ_STRIDE;
1015:         from12->type        = VEC_SCATTER_SEQ_STRIDE;
1016:         ctx->todata       = (void*)to12;
1017:         ctx->fromdata     = (void*)from12;
1018:         ctx->postrecvs    = 0;
1019:         ctx->begin        = VecScatterBegin_SStoSS;
1020:         ctx->end          = 0;
1021:         ctx->destroy      = VecScatterDestroy_SGtoSG;
1022:         ctx->copy         = VecScatterCopy_PStoSS;
1023:         PetscLogInfo(xin,"VecScatterCreate:Special case: processors only getting local values\n");
1024:         goto functionend;
1025:       }
1026:     } else {
1027:       MPI_Allreduce(&islocal,&cando,1,MPI_INT,MPI_LAND,xin->comm);
1028:     }

1030:     /* test for special case of all processors getting entire vector */
1031:     totalv = 0;
1032:     if (ix->type == IS_STRIDE && iy->type == IS_STRIDE){
1033:       int                  i,nx,ny,to_first,to_step,from_first,from_step,*count,N;
1034:       VecScatter_MPI_ToAll *sto;

1036:       ISGetLocalSize(ix,&nx);
1037:       ISStrideGetInfo(ix,&from_first,&from_step);
1038:       ISGetLocalSize(iy,&ny);
1039:       ISStrideGetInfo(iy,&to_first,&to_step);
1040:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1041:       VecGetSize(xin,&N);
1042:       if (nx != N) {
1043:         totalv = 0;
1044:       } else if (from_first == 0        && from_step == 1 &&
1045:                  from_first == to_first && from_step == to_step){
1046:         totalv = 1;
1047:       } else totalv = 0;
1048:       MPI_Allreduce(&totalv,&cando,1,MPI_INT,MPI_LAND,xin->comm);

1050:       if (cando) {
1051:         Map map;

1053:         ierr  = MPI_Comm_size(ctx->comm,&size);
1054:         ierr  = PetscNew(VecScatter_MPI_ToAll,&sto);
1055:         ierr  = PetscMalloc(size*sizeof(int),&count);
1056:         ierr  = VecGetMap(xin,&map);
1057:         ierr  = MapGetGlobalRange(map,&range);
1058:         for (i=0; i<size; i++) {
1059:           count[i] = range[i+1] - range[i];
1060:         }
1061:         sto->count        = count;
1062:         sto->work1        = 0;
1063:         sto->work2        = 0;
1064:         sto->type         = VEC_SCATTER_MPI_TOALL;
1065:         PetscLogObjectMemory(ctx,sizeof(VecScatter_MPI_ToAll)+size*sizeof(int));
1066:         ctx->todata       = (void*)sto;
1067:         ctx->fromdata     = 0;
1068:         ctx->postrecvs    = 0;
1069:         ctx->begin        = VecScatterBegin_MPI_ToAll;
1070:         ctx->end          = 0;
1071:         ctx->destroy      = VecScatterDestroy_MPI_ToAll;
1072:         ctx->copy         = VecScatterCopy_MPI_ToAll;
1073:         PetscLogInfo(xin,"VecScatterCreate:Special case: all processors get entire parallel vector\n");
1074:         goto functionend;
1075:       }
1076:     } else {
1077:       MPI_Allreduce(&totalv,&cando,1,MPI_INT,MPI_LAND,xin->comm);
1078:     }

1080:     /* test for special case of processor 0 getting entire vector */
1081:     totalv = 0;
1082:     if (ix->type == IS_STRIDE && iy->type == IS_STRIDE){
1083:       int                  i,nx,ny,to_first,to_step,from_first,from_step,*count,rank,N;
1084:       VecScatter_MPI_ToAll *sto;

1086:       PetscObjectGetComm((PetscObject)xin,&comm);
1087:       MPI_Comm_rank(comm,&rank);
1088:       ISGetLocalSize(ix,&nx);
1089:       ISStrideGetInfo(ix,&from_first,&from_step);
1090:       ISGetLocalSize(iy,&ny);
1091:       ISStrideGetInfo(iy,&to_first,&to_step);
1092:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1093:       if (!rank) {
1094:         VecGetSize(xin,&N);
1095:         if (nx != N) {
1096:           totalv = 0;
1097:         } else if (from_first == 0        && from_step == 1 &&
1098:                    from_first == to_first && from_step == to_step){
1099:           totalv = 1;
1100:         } else totalv = 0;
1101:       } else {
1102:         if (!nx) totalv = 1;
1103:         else     totalv = 0;
1104:       }
1105:       MPI_Allreduce(&totalv,&cando,1,MPI_INT,MPI_LAND,xin->comm);

1107:       if (cando) {
1108:         Map map;

1110:         ierr  = MPI_Comm_size(ctx->comm,&size);
1111:         ierr  = PetscNew(VecScatter_MPI_ToAll,&sto);
1112:         ierr  = PetscMalloc(size*sizeof(int),&count);
1113:         ierr  = VecGetMap(xin,&map);
1114:         ierr  = MapGetGlobalRange(map,&range);
1115:         for (i=0; i<size; i++) {
1116:           count[i] = range[i+1] - range[i];
1117:         }
1118:         sto->count        = count;
1119:         sto->work1        = 0;
1120:         sto->work2        = 0;
1121:         sto->type         = VEC_SCATTER_MPI_TOONE;
1122:         PetscLogObjectMemory(ctx,sizeof(VecScatter_MPI_ToAll)+size*sizeof(int));
1123:         ctx->todata       = (void*)sto;
1124:         ctx->fromdata     = 0;
1125:         ctx->postrecvs    = 0;
1126:         ctx->begin        = VecScatterBegin_MPI_ToOne;
1127:         ctx->end          = 0;
1128:         ctx->destroy      = VecScatterDestroy_MPI_ToAll;
1129:         ctx->copy         = VecScatterCopy_MPI_ToAll;
1130:         PetscLogInfo(xin,"VecScatterCreate:Special case: processor zero gets entire parallel vector, rest get none\n");
1131:         goto functionend;
1132:       }
1133:     } else {
1134:       MPI_Allreduce(&totalv,&cando,1,MPI_INT,MPI_LAND,xin->comm);
1135:     }

1137:     ISBlock(ix,&ixblock);
1138:     ISBlock(iy,&iyblock);
1139:     ISStride(iy,&iystride);
1140:     if (ixblock) {
1141:       /* special case block to block */
1142:       if (iyblock) {
1143:         int nx,ny,*idx,*idy,bsx,bsy;
1144:         ISBlockGetBlockSize(iy,&bsy);
1145:         ISBlockGetBlockSize(ix,&bsx);
1146:         if (bsx == bsy && (bsx == 12 || bsx == 5 || bsx == 4 || bsx == 3 || bsx == 2)) {
1147:           ISBlockGetSize(ix,&nx);
1148:           ISBlockGetIndices(ix,&idx);
1149:           ISBlockGetSize(iy,&ny);
1150:           ISBlockGetIndices(iy,&idy);
1151:           if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1152:           VecScatterCreate_PtoS(nx,idx,ny,idy,xin,yin,bsx,ctx);
1153:           ISBlockRestoreIndices(ix,&idx);
1154:           ISBlockRestoreIndices(iy,&idy);
1155:           PetscLogInfo(xin,"VecScatterCreate:Special case: blocked indices\n");
1156:           goto functionend;
1157:         }
1158:       /* special case block to stride */
1159:       } else if (iystride) {
1160:         int ystart,ystride,ysize,bsx;
1161:         ISStrideGetInfo(iy,&ystart,&ystride);
1162:         ISGetLocalSize(iy,&ysize);
1163:         ISBlockGetBlockSize(ix,&bsx);
1164:         /* see if stride index set is equivalent to block index set */
1165:         if (((bsx == 2) || (bsx == 3) || (bsx == 4) || (bsx == 5) || (bsx == 12)) &&
1166:             ((ystart % bsx) == 0) && (ystride == 1) && ((ysize % bsx) == 0)) {
1167:           int nx,*idx,*idy,il;
1168:           ISBlockGetSize(ix,&nx); ISBlockGetIndices(ix,&idx);
1169:           if (ysize != bsx*nx) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1170:           PetscMalloc((nx+1)*sizeof(int),&idy);
1171:           idy[0] = ystart;
1172:           for (il=1; il<nx; il++) idy[il] = idy[il-1] + bsx;
1173:           VecScatterCreate_PtoS(nx,idx,nx,idy,xin,yin,bsx,ctx);
1174:           PetscFree(idy);
1175:           ISBlockRestoreIndices(ix,&idx);
1176:           PetscLogInfo(xin,"VecScatterCreate:Special case: blocked indices to stride\n");
1177:           goto functionend;
1178:         }
1179:       }
1180:     }
1181:     /* left over general case */
1182:     {
1183:       int nx,ny,*idx,*idy;
1184:       ISGetLocalSize(ix,&nx);
1185:       ISGetIndices(ix,&idx);
1186:       ISGetLocalSize(iy,&ny);
1187:       ISGetIndices(iy,&idy);
1188:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1189:       VecScatterCreate_PtoS(nx,idx,ny,idy,xin,yin,1,ctx);
1190:       ISRestoreIndices(ix,&idx);
1191:       ISRestoreIndices(iy,&idy);
1192:       goto functionend;
1193:     }
1194:   }
1195:   /* ---------------------------------------------------------------------------*/
1196:   if (xin_type == VECSEQ && yin_type == VECMPI) {
1197:   /* ===========================================================================================================
1198:         Check for special cases
1199:      ==========================================================================================================*/
1200:     /* special case local copy portion */
1201:     islocal = PETSC_FALSE;
1202:     if (ix->type == IS_STRIDE && iy->type == IS_STRIDE){
1203:       int                   nx,ny,to_first,to_step,from_step,start,end,from_first;
1204:       VecScatter_Seq_Stride *from,*to;

1206:       VecGetOwnershipRange(yin,&start,&end);
1207:       ISGetLocalSize(ix,&nx);
1208:       ISStrideGetInfo(ix,&from_first,&from_step);
1209:       ISGetLocalSize(iy,&ny);
1210:       ISStrideGetInfo(iy,&to_first,&to_step);
1211:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1212:       if (iy->min >= start && iy->max < end) islocal = PETSC_TRUE; else islocal = PETSC_FALSE;
1213:       MPI_Allreduce(&islocal,&cando,1,MPI_INT,MPI_LAND,yin->comm);
1214:       if (cando) {
1215:         ierr              = PetscNew(VecScatter_Seq_Stride,&to);
1216:         to->n             = nx;
1217:         to->first         = to_first-start;
1218:         to->step          = to_step;
1219:         ierr              = PetscNew(VecScatter_Seq_Stride,&from);
1220:         PetscLogObjectMemory(ctx,2*sizeof(VecScatter_Seq_Stride));
1221:         from->n           = nx;
1222:         from->first       = from_first;
1223:         from->step        = from_step;
1224:         to->type          = VEC_SCATTER_SEQ_STRIDE;
1225:         from->type        = VEC_SCATTER_SEQ_STRIDE;
1226:         ctx->todata       = (void*)to;
1227:         ctx->fromdata     = (void*)from;
1228:         ctx->postrecvs    = 0;
1229:         ctx->begin        = VecScatterBegin_SStoSS;
1230:         ctx->end          = 0;
1231:         ctx->destroy      = VecScatterDestroy_SGtoSG;
1232:         ctx->copy         = VecScatterCopy_PStoSS;
1233:         PetscLogInfo(xin,"VecScatterCreate:Special case: sequential stride to stride\n");
1234:         goto functionend;
1235:       }
1236:     } else {
1237:       MPI_Allreduce(&islocal,&cando,1,MPI_INT,MPI_LAND,yin->comm);
1238:     }
1239:     /* general case */
1240:     {
1241:       int nx,ny,*idx,*idy;
1242:       ISGetLocalSize(ix,&nx);
1243:       ISGetIndices(ix,&idx);
1244:       ISGetLocalSize(iy,&ny);
1245:       ISGetIndices(iy,&idy);
1246:       if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1247:       VecScatterCreate_StoP(nx,idx,ny,idy,yin,ctx);
1248:       ISRestoreIndices(ix,&idx);
1249:       ISRestoreIndices(iy,&idy);
1250:       goto functionend;
1251:     }
1252:   }
1253:   /* ---------------------------------------------------------------------------*/
1254:   if (xin_type == VECMPI && yin_type == VECMPI) {
1255:     /* no special cases for now */
1256:     int nx,ny,*idx,*idy;
1257:     ierr    = ISGetLocalSize(ix,&nx);
1258:     ierr    = ISGetIndices(ix,&idx);
1259:     ierr    = ISGetLocalSize(iy,&ny);
1260:     ierr    = ISGetIndices(iy,&idy);
1261:     if (nx != ny) SETERRQ(PETSC_ERR_ARG_SIZ,"Local scatter sizes don't match");
1262:     ierr    = VecScatterCreate_PtoP(nx,idx,ny,idy,xin,yin,ctx);
1263:     ierr    = ISRestoreIndices(ix,&idx);
1264:     ierr    = ISRestoreIndices(iy,&idy);
1265:     goto functionend;
1266:   }

1268:   functionend:
1269:   *newctx = ctx;
1270:   if (tix) {ISDestroy(tix);}
1271:   if (tiy) {ISDestroy(tiy);}
1272:   PetscOptionsHasName(PETSC_NULL,"-vecscatter_view_info",&flag);
1273:   if (flag) {
1274:     PetscViewerPushFormat(PETSC_VIEWER_STDOUT_(comm),PETSC_VIEWER_ASCII_INFO);
1275:     VecScatterView(ctx,PETSC_VIEWER_STDOUT_(comm));
1276:     PetscViewerPopFormat(PETSC_VIEWER_STDOUT_(comm));
1277:   }
1278:   PetscOptionsHasName(PETSC_NULL,"-vecscatter_view",&flag);
1279:   if (flag) {
1280:     VecScatterView(ctx,PETSC_VIEWER_STDOUT_(comm));
1281:   }
1282:   return(0);
1283: }

1285: /* ------------------------------------------------------------------*/
1286: /*@
1287:    VecScatterPostRecvs - Posts the receives required for the ready-receiver
1288:    version of the VecScatter routines.

1290:    Collective on VecScatter

1292:    Input Parameters:
1293: +  x - the vector from which we scatter (not needed,can be null)
1294: .  y - the vector to which we scatter
1295: .  addv - either ADD_VALUES or INSERT_VALUES
1296: .  mode - the scattering mode, usually SCATTER_FORWARD.  The available modes are:
1297:     SCATTER_FORWARD, SCATTER_REVERSE
1298: -  inctx - scatter context generated by VecScatterCreate()

1300:    Output Parameter:
1301: .  y - the vector to which we scatter

1303:    Level: advanced

1305:    Notes:
 1306:    If you use SCATTER_REVERSE the first two arguments should be reversed from the
 1307:    order used with SCATTER_FORWARD.
1308:    The vectors x and y cannot be the same. y[iy[i]] = x[ix[i]], for i=0,...,ni-1

1310:    This scatter is far more general than the conventional
1311:    scatter, since it can be a gather or a scatter or a combination,
1312:    depending on the indices ix and iy.  If x is a parallel vector and y
1313:    is sequential, VecScatterBegin() can serve to gather values to a
1314:    single processor.  Similarly, if y is parallel and x sequential, the
1315:    routine can scatter from one processor to many processors.

1317: .seealso: VecScatterCreate(), VecScatterEnd(), VecScatterBegin()
1318: @*/
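/*
   Editor's note (illustrative sketch, not part of the original source): with the
   ready-receiver option (-vecscatter_rr) the receives must already be posted when
   the matching sends are issued, so VecScatterPostRecvs() is called ahead of the
   usual begin/end pair for the same arguments:

      ierr = VecScatterPostRecvs(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
      ...                                  other work that does not touch x or y
      ierr = VecScatterBegin(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
      ierr = VecScatterEnd(x,y,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
*/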
1319: int VecScatterPostRecvs(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter inctx)
1320: {


1327:   if (inctx->postrecvs) {
1328:     (*inctx->postrecvs)(x,y,addv,mode,inctx);
1329:   }
1330:   return(0);
1331: }

1333: /* ------------------------------------------------------------------*/
1334: /*@
1335:    VecScatterBegin - Begins a generalized scatter from one vector to
1336:    another. Complete the scattering phase with VecScatterEnd().

1338:    Collective on VecScatter and Vec

1340:    Input Parameters:
1341: +  x - the vector from which we scatter
1342: .  y - the vector to which we scatter
1343: .  addv - either ADD_VALUES or INSERT_VALUES
1344: .  mode - the scattering mode, usually SCATTER_FORWARD.  The available modes are:
1345:     SCATTER_FORWARD or SCATTER_REVERSE
1346: -  inctx - scatter context generated by VecScatterCreate()

1348:    Level: intermediate

1350:    Notes:
1351:    The vectors x and y need not be the same vectors used in the call 
1352:    to VecScatterCreate(), but x must have the same parallel data layout
1353:    as that passed in as the x to VecScatterCreate(), similarly for the y.
1354:    Most likely they have been obtained from VecDuplicate().

1356:    You cannot change the values in the input vector between the calls to VecScatterBegin()
1357:    and VecScatterEnd().

 1359:    If you use SCATTER_REVERSE the first two arguments should be reversed from the
 1360:    order used with SCATTER_FORWARD.
1361:    
1362:    y[iy[i]] = x[ix[i]], for i=0,...,ni-1

1364:    This scatter is far more general than the conventional
1365:    scatter, since it can be a gather or a scatter or a combination,
1366:    depending on the indices ix and iy.  If x is a parallel vector and y
1367:    is sequential, VecScatterBegin() can serve to gather values to a
1368:    single processor.  Similarly, if y is parallel and x sequential, the
1369:    routine can scatter from one processor to many processors.

1371:    Concepts: scatter^between vectors
1372:    Concepts: gather^between vectors

1374: .seealso: VecScatterCreate(), VecScatterEnd()
1375: @*/
1376: int VecScatterBegin(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter inctx)
1377: {
1379: #if defined(PETSC_USE_BOPT_g)
1380:   int to_n,from_n;
1381: #endif

1386:   if (inctx->inuse) SETERRQ(PETSC_ERR_ARG_WRONGSTATE," Scatter ctx already in use");
1387: #if defined(PETSC_USE_BOPT_g)
1388:   /*
1389:      Error checking to make sure these vectors match the vectors used
1390:    to create the vector scatter context. A value of -1 in from_n and to_n indicates
1391:    that the vector lengths are unknown (for example, with mapped scatters) and thus
1392:    no error checking is performed.
1393:   */
1394:   if (inctx->from_n >= 0 && inctx->to_n >= 0) {
1395:     VecGetLocalSize(x,&from_n);
1396:     VecGetLocalSize(y,&to_n);
1397:     if (mode & SCATTER_REVERSE) {
1398:       if (to_n != inctx->from_n) SETERRQ(PETSC_ERR_ARG_SIZ,"Vector wrong size for scatter");
1399:       if (from_n != inctx->to_n) SETERRQ(PETSC_ERR_ARG_SIZ,"Vector wrong size for scatter");
1400:     } else {
1401:       if (to_n != inctx->to_n)     SETERRQ(PETSC_ERR_ARG_SIZ,"Vector wrong size for scatter");
1402:       if (from_n != inctx->from_n) SETERRQ(PETSC_ERR_ARG_SIZ,"Vector wrong size for scatter");
1403:     }
1404:   }
1405: #endif

1407:   inctx->inuse = PETSC_TRUE;
1408:   PetscLogEventBarrierBegin(VEC_ScatterBarrier,0,0,0,0,inctx->comm);
1409:   (*inctx->begin)(x,y,addv,mode,inctx);
1410:   if (inctx->beginandendtogether && inctx->end) {
1411:     inctx->inuse = PETSC_FALSE;
1412:     (*inctx->end)(x,y,addv,mode,inctx);
1413:   }
1414:   PetscLogEventBarrierEnd(VEC_ScatterBarrier,0,0,0,0,inctx->comm);
1415:   return(0);
1416: }

1418: /* --------------------------------------------------------------------*/
1419: /*@
1420:    VecScatterEnd - Ends a generalized scatter from one vector to another.  Call
1421:    after first calling VecScatterBegin().

1423:    Collective on VecScatter and Vec

1425:    Input Parameters:
1426: +  x - the vector from which we scatter
1427: .  y - the vector to which we scatter
1428: .  addv - either ADD_VALUES or INSERT_VALUES.
1429: .  mode - the scattering mode, usually SCATTER_FORWARD.  The available modes are:
1430:      SCATTER_FORWARD, SCATTER_REVERSE
1431: -  ctx - scatter context generated by VecScatterCreate()

1433:    Level: intermediate

1435:    Notes:
1436:    If you use SCATTER_REVERSE the first two arguments should be reversed relative
1437:    to the SCATTER_FORWARD call.
1438:    y[iy[i]] = x[ix[i]], for i=0,...,ni-1

1440: .seealso: VecScatterBegin(), VecScatterCreate()
1441: @*/
1442: int VecScatterEnd(Vec x,Vec y,InsertMode addv,ScatterMode mode,VecScatter ctx)
1443: {

1449:   ctx->inuse = PETSC_FALSE;
1450:   if (!ctx->end) return(0);
1451:   if (!ctx->beginandendtogether) {
1452:     PetscLogEventBegin(VEC_ScatterEnd,ctx,x,y,0);
1453:     (*(ctx)->end)(x,y,addv,mode,ctx);
1454:     PetscLogEventEnd(VEC_ScatterEnd,ctx,x,y,0);
1455:   }
1456:   return(0);
1457: }
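
/*
   A minimal sketch of the usual begin/end pairing, assuming PETSc has been
   initialized: a parallel vector x is gathered into a sequential vector y of
   the full global size on every process, with index sets that simply select
   all entries in their natural order.

      Vec        x,y;
      IS         ix,iy;
      VecScatter ctx;
      int        ierr,N = 100;

      ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,N,&x);CHKERRQ(ierr);
      ierr = VecCreateSeq(PETSC_COMM_SELF,N,&y);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&ix);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iy);CHKERRQ(ierr);
      ierr = VecScatterCreate(x,ix,y,iy,&ctx);CHKERRQ(ierr);
      ierr = VecScatterBegin(x,y,INSERT_VALUES,SCATTER_FORWARD,ctx);CHKERRQ(ierr);
      ierr = VecScatterEnd(x,y,INSERT_VALUES,SCATTER_FORWARD,ctx);CHKERRQ(ierr);

   With SCATTER_REVERSE the first two vector arguments are exchanged, so the
   same context moves data from y back into x:

      ierr = VecScatterBegin(y,x,INSERT_VALUES,SCATTER_REVERSE,ctx);CHKERRQ(ierr);
      ierr = VecScatterEnd(y,x,INSERT_VALUES,SCATTER_REVERSE,ctx);CHKERRQ(ierr);

      ierr = VecScatterDestroy(ctx);CHKERRQ(ierr);
      ierr = ISDestroy(ix);CHKERRQ(ierr);
      ierr = ISDestroy(iy);CHKERRQ(ierr);
      ierr = VecDestroy(x);CHKERRQ(ierr);
      ierr = VecDestroy(y);CHKERRQ(ierr);
*/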

1459: /*@C
1460:    VecScatterDestroy - Destroys a scatter context created by 
1461:    VecScatterCreate().

1463:    Collective on VecScatter

1465:    Input Parameter:
1466: .  ctx - the scatter context

1468:    Level: intermediate

1470: .seealso: VecScatterCreate(), VecScatterCopy()
1471: @*/
1472: int VecScatterDestroy(VecScatter ctx)
1473: {

1478:   if (--ctx->refct > 0) return(0);

1480:   /* if memory was published with AMS then destroy it */
1481:   PetscObjectDepublish(ctx);

1483:   (*ctx->destroy)(ctx);
1484:   return(0);
1485: }

1487: /*@C
1488:    VecScatterCopy - Makes a copy of a scatter context.

1490:    Collective on VecScatter

1492:    Input Parameter:
1493: .  sctx - the scatter context

1495:    Output Parameter:
1496: .  ctx - the context copy

1498:    Level: advanced

1500: .seealso: VecScatterCreate(), VecScatterDestroy()
1501: @*/
1502: int VecScatterCopy(VecScatter sctx,VecScatter *ctx)
1503: {

1509:   if (!sctx->copy) SETERRQ(PETSC_ERR_SUP,"Cannot copy this type");
1510:   PetscHeaderCreate(*ctx,_p_VecScatter,int,VEC_SCATTER_COOKIE,0,"VecScatter",sctx->comm,VecScatterDestroy,VecScatterView);
1511:   PetscLogObjectCreate(*ctx);
1512:   PetscLogObjectMemory(*ctx,sizeof(struct _p_VecScatter));
1513:   (*ctx)->to_n   = sctx->to_n;
1514:   (*ctx)->from_n = sctx->from_n;
1515:   (*sctx->copy)(sctx,*ctx);
1516:   return(0);
1517: }
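
/*
   A minimal sketch, assuming sctx was created by VecScatterCreate(); the
   copy may then be used, and later destroyed, independently of the original,
   for example so that two scatters with the same pattern can be outstanding
   at the same time.

      VecScatter ctx2;
      ierr = VecScatterCopy(sctx,&ctx2);CHKERRQ(ierr);
      ierr = VecScatterBegin(x,y,INSERT_VALUES,SCATTER_FORWARD,ctx2);CHKERRQ(ierr);
      ierr = VecScatterEnd(x,y,INSERT_VALUES,SCATTER_FORWARD,ctx2);CHKERRQ(ierr);
      ierr = VecScatterDestroy(ctx2);CHKERRQ(ierr);
*/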


1520: /* ------------------------------------------------------------------*/
1521: /*@
1522:    VecScatterView - Views a vector scatter context.

1524:    Collective on VecScatter

1526:    Input Parameters:
1527: +  ctx - the scatter context
1528: -  viewer - the viewer for displaying the context

1530:    Level: intermediate

1532: @*/
1533: int VecScatterView(VecScatter ctx,PetscViewer viewer)
1534: {

1539:   if (!viewer) viewer = PETSC_VIEWER_STDOUT_(ctx->comm);
1541:   if (!ctx->view) SETERRQ(PETSC_ERR_SUP,"Cannot view this type of scatter context yet");

1543:   (*ctx->view)(ctx,viewer);
1544:   return(0);
1545: }
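
/*
   A minimal sketch: view a scatter context on the standard output viewer for
   the world communicator (passing a null viewer selects the default viewer
   for the context's communicator, as the code above shows).

      ierr = VecScatterView(ctx,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
*/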

1547: /*@
1548:    VecScatterRemap - Remaps the "from" and "to" indices in a 
1549:    vector scatter context. FOR EXPERTS ONLY!

1551:    Collective on VecScatter

1553:    Input Parameters:
1554: +  scat - vector scatter context
1555: .  to   - remapping for "to" indices (may be PETSC_NULL)
1556: -  from - remapping for "from" indices (may be PETSC_NULL)

1558:    Level: developer

1560:    Notes: In the parallel case the todata contains the indices from
1561:           which the data is TAKEN, and the fromdata is where the data
1562:           is finally put. This is very confusing!

1564:           In the sequential case the todata contains the indices where
1565:           the data is put and the fromdata is where it is taken from;
1566:           this is the reverse of the parallel case.

1568: @*/
1569: int VecScatterRemap(VecScatter scat,int *rto,int *rfrom)
1570: {
1571:   VecScatter_Seq_General *to,*from;
1572:   VecScatter_MPI_General *mto;
1573:   int                    i;


1580:   from = (VecScatter_Seq_General *)scat->fromdata;
1581:   mto  = (VecScatter_MPI_General *)scat->todata;

1583:   if (mto->type == VEC_SCATTER_MPI_TOALL) SETERRQ(PETSC_ERR_ARG_SIZ,"Not for to all scatter");

1585:   if (rto) {
1586:     if (mto->type == VEC_SCATTER_MPI_GENERAL) {
1587:       /* handle off processor parts */
1588:       for (i=0; i<mto->starts[mto->n]; i++) {
1589:         mto->indices[i] = rto[mto->indices[i]];
1590:       }
1591:       /* handle local part */
1592:       to = &mto->local;
1593:       for (i=0; i<to->n; i++) {
1594:         to->slots[i] = rto[to->slots[i]];
1595:       }
1596:     } else if (from->type == VEC_SCATTER_SEQ_GENERAL) {
1597:       for (i=0; i<from->n; i++) {
1598:         from->slots[i] = rto[from->slots[i]];
1599:       }
1600:     } else if (from->type == VEC_SCATTER_SEQ_STRIDE) {
1601:       VecScatter_Seq_Stride *sto = (VecScatter_Seq_Stride*)from;
1602: 
1603:       /* if the remapping is the identity and stride is identity then skip remap */
1604:       if (sto->step == 1 && sto->first == 0) {
1605:         for (i=0; i<sto->n; i++) {
1606:           if (rto[i] != i) {
1607:             SETERRQ(PETSC_ERR_ARG_SIZ,"Unable to remap such scatters");
1608:           }
1609:         }
1610:       } else SETERRQ(PETSC_ERR_ARG_SIZ,"Unable to remap such scatters");
1611:     } else SETERRQ(PETSC_ERR_ARG_SIZ,"Unable to remap such scatters");
1612:   }

1614:   if (rfrom) {
1615:     SETERRQ(PETSC_ERR_SUP,"Unable to remap the FROM in scatters yet");
1616:   }

1618:   /*
1619:      Mark the vector lengths as unknown because we do not know the 
1620:    lengths of the remapped vectors
1621:   */
1622:   scat->from_n = -1;
1623:   scat->to_n   = -1;

1625:   return(0);
1626: }
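
/*
   A minimal sketch, assuming ctx is a general (not to-all) scatter and that
   newto[] is a caller-defined integer array giving, for each old "to" index,
   the index it should be mapped to; remapping of the "from" indices is not
   supported, so PETSC_NULL is passed for that argument.

      ierr = VecScatterRemap(ctx,newto,PETSC_NULL);CHKERRQ(ierr);
*/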