Actual source code: pbvec.c

#define PETSCVEC_DLL
/*
   This file contains routines for parallel vector operations.
 */
#include "src/vec/vec/impls/mpi/pvecimpl.h"

#if 0
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  return(0);
}
#endif

PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;

  /* local dot product on this process, then sum the contributions globally */
  VecDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);
  *z = sum;
  return(0);
}

PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;

  /* same pattern as VecDot_MPI, but with the unconjugated (transpose) dot */
  VecTDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);
  *z   = sum;
  return(0);
}

PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
{
  if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
    v->stash.donotstash = flag;
  } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
    v->stash.ignorenegidx = flag;
  }
  return(0);
}
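/*
   Usage sketch (illustration only, not library source): how the options above
   are reached through the public VecSetOption() interface.  If each process
   will only ever set entries it owns, VEC_IGNORE_OFF_PROC_ENTRIES lets
   VecAssemblyBegin()/VecAssemblyEnd() skip the stash communication entirely.
*/
#if 0
static PetscErrorCode SketchSetOptions(Vec v)
{
  PetscErrorCode ierr;
  /* promise that only locally owned entries will be set */
  ierr = VecSetOption(v,VEC_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
  /* silently drop negative indices passed to VecSetValues() */
  ierr = VecSetOption(v,VEC_IGNORE_NEGATIVE_INDICES,PETSC_TRUE);CHKERRQ(ierr);
  return 0;
}
#endif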

EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  Vec_MPI        *v = (Vec_MPI *)vin->data;

  if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array = (PetscScalar *)a;
  if (v->localrep) {
    VecPlaceArray(v->localrep,a);
  }
  return(0);
}
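/*
   Usage sketch (illustration only, not library source): the place/reset
   pairing that the error check above enforces.  A second VecPlaceArray()
   without an intervening VecResetArray() is flagged as a wrong-state error.
*/
#if 0
static PetscErrorCode SketchPlaceArray(Vec v,PetscScalar *mine)
{
  PetscErrorCode ierr;
  ierr = VecPlaceArray(v,mine);CHKERRQ(ierr);  /* v now operates on "mine" */
  /* ... use v ... */
  ierr = VecResetArray(v);CHKERRQ(ierr);       /* bring back the saved array */
  return 0;
}
#endif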

EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoadIntoVector_Default,
            VecReciprocal_Default,
            0, /* VecViewNative... */
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_Seq,
            0,
            VecMaxPointwiseDivide_Seq,
            VecLoad_Binary,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI};
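/*
   DvOps is the function table installed into every VECMPI vector by
   VecCreate_MPI_Private() below.  Operations that need global communication
   (dot products, norms, max/min, assembly) point at *_MPI routines, while
   purely local operations reuse the *_Seq implementations on the local array.
   A simplified dispatch sketch, for illustration only (the real PETSc
   wrappers also do argument checking and logging); for a VECMPI vector the
   call resolves to VecDot_MPI():

      PetscErrorCode VecDot(Vec x,Vec y,PetscScalar *z)
      {
        return (*x->ops->dot)(x,y,z);
      }
*/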

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;

  PetscNewLog(v,Vec_MPI,&s);
  v->data        = (void*)s;
  PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (v->map.bs == -1) v->map.bs = 1;
  PetscMapSetUp(&v->map);
  if (array) {
    s->array           = (PetscScalar *)array;
    s->array_allocated = 0;
  } else {
    PetscInt n         = v->map.n+nghost;
    PetscMalloc(n*sizeof(PetscScalar),&s->array);
    PetscLogObjectMemory(v,n*sizeof(PetscScalar));
    PetscMemzero(s->array,v->map.n*sizeof(PetscScalar));
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have a local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* Create the stashes. The block size for the bstash is set later, when
     VecSetValuesBlocked() is called.
  */
  VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);
  VecStashCreate_Private(((PetscObject)v)->comm,v->map.bs,&v->bstash);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);
#endif
  PetscObjectChangeTypeName((PetscObject)v,VECMPI);
  PetscPublishAll(v);
  return(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/
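/*
   Creation sketch (illustration only, not library source): the standard way
   to obtain a VECMPI vector through the generic interface documented above.
*/
#if 0
static PetscErrorCode SketchCreateMPI(MPI_Comm comm,PetscInt n,Vec *v)
{
  PetscErrorCode ierr;
  ierr = VecCreate(comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,n,PETSC_DECIDE);CHKERRQ(ierr); /* local size n, global size computed */
  ierr = VecSetType(*v,VECMPI);CHKERRQ(ierr);          /* or VecSetFromOptions() with -vec_type mpi */
  return 0;
}
#endif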

PetscErrorCode  VecCreate_MPI(Vec vv)
{
  VecCreate_MPI_Private(vv,0,0);
  return(0);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user-provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  if (n == PETSC_DECIDE) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  }
  PetscSplitOwnership(comm,&n,&N);
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,0,array);
  return(0);
}
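/*
   Usage sketch (illustration only, not library source): creating a vector on
   top of caller-owned storage.  As the notes above state, VecDestroy() does
   not free the array, so the caller frees it after the vector is gone.
*/
#if 0
static PetscErrorCode SketchCreateWithArray(MPI_Comm comm,PetscInt n)
{
  PetscErrorCode ierr;
  PetscScalar    *a;
  Vec            v;
  ierr = PetscMalloc(n*sizeof(PetscScalar),&a);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(comm,n,PETSC_DECIDE,a,&v);CHKERRQ(ierr);
  /* ... use v; its entries live directly in a ... */
  ierr = VecDestroy(v);CHKERRQ(ierr);  /* does NOT free a */
  ierr = PetscFree(a);CHKERRQ(ierr);
  return 0;
}
#endif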

/*@
    VecGhostGetLocalForm - Obtains the local ghosted representation of
    a parallel vector created with VecCreateGhost().

    Not Collective

    Input Parameter:
.   g - the global vector. The vector must have been obtained with either
        VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().

    Output Parameter:
.   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values, but rather it
    returns a sequential vector that includes the locations for the ghost
    values and their current values. The returned vector and the original
    vector passed in share the same array that contains the actual vector data.

    One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
    finished using the object.

    Level: advanced

   Concepts: vectors^ghost point access

.seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode  VecGhostGetLocalForm(Vec g,Vec *l)
{
  PetscTruth     isseq,ismpi;

  PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);
  PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);
  if (ismpi) {
    Vec_MPI *v  = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    *l = g;
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
  }
  PetscObjectReference((PetscObject)*l);
  return(0);
}
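/*
   Usage sketch (illustration only, not library source): reading ghost values
   through the local form.  The local form shares its array with the global
   vector: entries [0,n) are the owned values and entries [n,n+nghost) are the
   ghosts, in the order given at creation time.
*/
#if 0
static PetscErrorCode SketchReadGhosts(Vec g,PetscInt n,PetscInt nghost)
{
  PetscErrorCode ierr;
  Vec            l;
  PetscScalar    *a;
  PetscInt       i;
  ierr = VecGhostGetLocalForm(g,&l);CHKERRQ(ierr);
  ierr = VecGetArray(l,&a);CHKERRQ(ierr);
  for (i=0; i<nghost; i++) {
    /* a[n+i] holds the current value of the i-th ghost point */
  }
  ierr = VecRestoreArray(l,&a);CHKERRQ(ierr);
  ierr = VecGhostRestoreLocalForm(g,&l);CHKERRQ(ierr);
  return 0;
}
#endif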

/*@
    VecGhostRestoreLocalForm - Restores the local ghosted representation of
    a parallel vector obtained with VecGhostGetLocalForm().

    Not Collective

    Input Parameters:
+   g - the global vector
-   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values; it simply drops
    the reference to the sequential vector obtained with
    VecGhostGetLocalForm().

    Level: advanced

.seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
@*/
PetscErrorCode  VecGhostRestoreLocalForm(Vec g,Vec *l)
{
  PetscObjectDereference((PetscObject)*l);
  return(0);
}

/*@
   VecGhostUpdateBegin - Begins the vector scatter to update the vector from
   the local representation to the global, or from the global representation
   to the local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode  VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;

  v  = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);
  } else {
    VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);
  }
  return(0);
}

/*@
   VecGhostUpdateEnd - Ends the vector scatter to update the vector from
   the local representation to the global, or from the global representation
   to the local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode  VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI        *v;

  v  = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);
  } else {
    VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);
  }
  return(0);
}
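/*
   Usage sketch (illustration only, not library source): the
   accumulate-then-refresh pattern from the notes above wrapped in a helper.
   The reverse-add step sums each ghost contribution onto its owner; the
   forward-insert step then copies the owners' values back into every ghost.
*/
#if 0
static PetscErrorCode SketchGhostAccumulateAndRefresh(Vec v)
{
  PetscErrorCode ierr;
  ierr = VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  return 0;
}
#endif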

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array - the space to store the vector values (must be at least of length n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  PetscSplitOwnership(comm,&n,&N);
  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,nghost,array);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ISCreateGeneral(comm,nghost,ghosts,&from);
  ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(to);
  ISDestroy(from);

  /* set the local-to-global mapping for the ghosted vector */
  PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);
  VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);
  PetscFree(indices);
  VecSetLocalToGlobalMapping(*vv,ltog);
  ISLocalToGlobalMappingDestroy(ltog);
  return(0);
}

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);
  return(0);
}
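/*
   End-to-end sketch (illustration only, not library source) tying the ghost
   routines together.  The sizes and the ghost index list are hypothetical
   placeholders; real code would derive them from the mesh or partitioning.
*/
#if 0
static PetscErrorCode SketchGhostWorkflow(MPI_Comm comm)
{
  PetscErrorCode ierr;
  PetscInt       n = 4,nghost = 2;
  PetscInt       ghosts[2] = {0,1};   /* hypothetical global indices of ghost points */
  Vec            v;
  ierr = VecCreateGhost(comm,n,PETSC_DECIDE,nghost,ghosts,&v);CHKERRQ(ierr);
  ierr = VecSet(v,1.0);CHKERRQ(ierr);
  /* push the owned values out into the ghost copies */
  ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecDestroy(v);CHKERRQ(ierr);
  return 0;
}
#endif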

PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  VecCreate(((PetscObject)win)->comm,v);
  VecSetSizes(*v,win->map.n,win->map.N);
  VecCreate_MPI_Private(*v,w->nghost,0);
  vw   = (Vec_MPI *)(*v)->data;
  PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    VecGetArray(*v,&array);
    VecCreateSeqWithArray(PETSC_COMM_SELF,win->map.n+w->nghost,array,&vw->localrep);
    PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));
    VecRestoreArray(*v,&array);
    PetscLogObjectParent(*v,vw->localrep);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      PetscObjectReference((PetscObject)vw->localupdate);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);
  PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);
  if (win->mapping) {
    PetscObjectReference((PetscObject)win->mapping);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    PetscObjectReference((PetscObject)win->bmapping);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map.bs    = win->map.bs;
  (*v)->bstash.bs = win->bstash.bs;

  return(0);
}

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array - the space to store the vector values (must be at least of length n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks), while
   nghost is the number of blocks in the ghost portion, i.e. the number of elements
   in the ghost portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  Vec_MPI        *w;
  PetscScalar    *larray;
  IS             from,to;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  PetscSplitOwnership(comm,&n,&N);
  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,nghost*bs,array);
  VecSetBlockSize(*vv,bs);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);
  VecSetBlockSize(w->localrep,bs);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ISCreateBlock(comm,bs,nghost,ghosts,&from);
  ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(to);
  ISDestroy(from);

  return(0);
}

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks), while
   nghost is the number of blocks in the ghost portion, i.e. the number of elements
   in the ghost portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);
  return(0);
}
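/*
   Size-convention sketch (illustration only, not library source): as the
   notes above say, n counts scalar entries while nghost counts blocks, so
   with bs = 2 and nghost = 2 the ghost region holds bs*nghost = 4 scalars
   and the local form has n + bs*nghost entries.  The indices here are
   hypothetical placeholders.
*/
#if 0
static PetscErrorCode SketchGhostBlock(MPI_Comm comm)
{
  PetscErrorCode ierr;
  PetscInt       bs = 2,n = 6,nghost = 2;
  PetscInt       ghosts[2] = {0,3};   /* hypothetical global indices of ghost blocks */
  Vec            v;
  ierr = VecCreateGhostBlock(comm,bs,n,PETSC_DECIDE,nghost,ghosts,&v);CHKERRQ(ierr);
  /* local form length: n + bs*nghost = 6 + 4 = 10 scalars */
  ierr = VecDestroy(v);CHKERRQ(ierr);
  return 0;
}
#endif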

/*
    These introduce a ghosted vector where the ghosting is determined by the call to
  VecSetLocalToGlobalMapping()
*/

PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  v->nghost = map->n - vv->map.n;

  /* we need to enlarge the array space that was allocated when the vector was created */
  PetscFree(v->array_allocated);
  PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);
  v->array = v->array_allocated;

  /* Create local representation */
  VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);
  PetscLogObjectParent(vv,v->localrep);
  return(0);
}


PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
{
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  VecSetValues(v->localrep,n,ix,values,mode);
  return(0);
}

PetscErrorCode  VecCreate_FETI(Vec vv)
{
  VecSetType(vv,VECMPI);

  /* override the methods that handle setting values locally */
  vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
  vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
  vv->ops->assemblybegin           = 0;
  vv->ops->assemblyend             = 0;
  vv->ops->setvaluesblocked        = 0;

  return(0);
}