Actual source code: pbvec.c
1: #define PETSCVEC_DLL
2: /*
3: This file contains routines for Parallel vector operations.
4: */
5: #include "src/vec/vec/impls/mpi/pvecimpl.h"
7: #if 0
10: static PetscErrorCode VecPublish_MPI(PetscObject obj)
11: {
13: return(0);
14: }
15: #endif
19: PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
20: {
21: PetscScalar sum,work;
25: VecDot_Seq(xin,yin,&work);
26: MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);
27: *z = sum;
28: return(0);
29: }
33: PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
34: {
35: PetscScalar sum,work;
39: VecTDot_Seq(xin,yin,&work);
40: MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,((PetscObject)xin)->comm);
41: *z = sum;
42: return(0);
43: }
47: PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
48: {
50: if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
51: v->stash.donotstash = flag;
52: } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
53: v->stash.ignorenegidx = flag;
54: }
55: return(0);
56: }
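/*
   Usage sketch (illustrative, not part of pbvec.c): VecSetOption_MPI() is reached through the
   public VecSetOption(), assumed here to take the same (option,flag) pair. With
   VEC_IGNORE_OFF_PROC_ENTRIES set, values destined for rows owned by other processes are
   dropped locally instead of being stashed and communicated during assembly. The vector x,
   the index row, and the value val are assumed to already exist.
.vb
     PetscErrorCode ierr;
     ierr = VecSetOption(x,VEC_IGNORE_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
     ierr = VecSetValues(x,1,&row,&val,INSERT_VALUES);CHKERRQ(ierr);
     ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
     ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
.ve
*/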
57:
58: EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
60: EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
65: PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
66: {
68: Vec_MPI *v = (Vec_MPI *)vin->data;
71: if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
72: v->unplacedarray = v->array; /* save previous array so reset can bring it back */
73: v->array = (PetscScalar *)a;
74: if (v->localrep) {
75: VecPlaceArray(v->localrep,a);
76: }
77: return(0);
78: }
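/*
   Usage sketch (illustrative, not part of pbvec.c): VecPlaceArray_MPI() saves the current
   array in unplacedarray so that VecResetArray() can restore it, as the error message above
   indicates. A typical pairing on a VECMPI vector x with local length n (names hypothetical):
.vb
     PetscErrorCode ierr;
     PetscScalar    *tmp;
     ierr = PetscMalloc(n*sizeof(PetscScalar),&tmp);CHKERRQ(ierr);
     ierr = VecPlaceArray(x,tmp);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecResetArray(x);CHKERRQ(ierr);
     ierr = PetscFree(tmp);CHKERRQ(ierr);
.ve
   VecPlaceArray() makes x operate on tmp; VecResetArray() restores the original storage,
   after which the caller may free tmp.
*/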
80: EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
81: EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
83: static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
84: VecDuplicateVecs_Default,
85: VecDestroyVecs_Default,
86: VecDot_MPI,
87: VecMDot_MPI,
88: VecNorm_MPI,
89: VecTDot_MPI,
90: VecMTDot_MPI,
91: VecScale_Seq,
92: VecCopy_Seq, /* 10 */
93: VecSet_Seq,
94: VecSwap_Seq,
95: VecAXPY_Seq,
96: VecAXPBY_Seq,
97: VecMAXPY_Seq,
98: VecAYPX_Seq,
99: VecWAXPY_Seq,
100: VecPointwiseMult_Seq,
101: VecPointwiseDivide_Seq,
102: VecSetValues_MPI, /* 20 */
103: VecAssemblyBegin_MPI,
104: VecAssemblyEnd_MPI,
105: VecGetArray_Seq,
106: VecGetSize_MPI,
107: VecGetSize_Seq,
108: VecRestoreArray_Seq,
109: VecMax_MPI,
110: VecMin_MPI,
111: VecSetRandom_Seq,
112: VecSetOption_MPI,
113: VecSetValuesBlocked_MPI,
114: VecDestroy_MPI,
115: VecView_MPI,
116: VecPlaceArray_MPI,
117: VecReplaceArray_Seq,
118: VecDot_Seq,
119: VecTDot_Seq,
120: VecNorm_Seq,
121: VecMDot_Seq,
122: VecMTDot_Seq,
123: VecLoadIntoVector_Default,
124: VecReciprocal_Default,
125: 0, /* VecViewNative... */
126: VecConjugate_Seq,
127: 0,
128: 0,
129: VecResetArray_Seq,
130: 0,
131: VecMaxPointwiseDivide_Seq,
132: VecLoad_Binary,
133: VecPointwiseMax_Seq,
134: VecPointwiseMaxAbs_Seq,
135: VecPointwiseMin_Seq,
136: VecGetValues_MPI};
140: /*
141: VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
142: VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
143: VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
145: If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
146: no space is allocated.
147: */
148: PetscErrorCode VecCreate_MPI_Private(Vec v,PetscTruth alloc,PetscInt nghost,const PetscScalar array[])
149: {
150: Vec_MPI *s;
155: PetscNewLog(v,Vec_MPI,&s);
156: v->data = (void*)s;
157: PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
158: s->nghost = nghost;
159: v->mapping = 0;
160: v->bmapping = 0;
161: v->petscnative = PETSC_TRUE;
163: if (v->map.bs == -1) v->map.bs = 1;
164: PetscMapSetUp(&v->map);
165: s->array = (PetscScalar *)array;
166: s->array_allocated = 0;
167: if (alloc && !array) {
168: PetscInt n = v->map.n+nghost;
169: PetscMalloc(n*sizeof(PetscScalar),&s->array);
170: PetscLogObjectMemory(v,n*sizeof(PetscScalar));
171: PetscMemzero(s->array,v->map.n*sizeof(PetscScalar));
172: s->array_allocated = s->array;
173: }
175: /* By default parallel vectors do not have local representation */
176: s->localrep = 0;
177: s->localupdate = 0;
179: v->stash.insertmode = NOT_SET_VALUES;
180: /* create the stashes. The block-size for bstash is set later when
181: VecSetValuesBlocked is called.
182: */
183: VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);
184: VecStashCreate_Private(((PetscObject)v)->comm,v->map.bs,&v->bstash);
185:
186: #if defined(PETSC_HAVE_MATLAB_ENGINE)
187: PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);
188: PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);
189: #endif
190: PetscObjectChangeTypeName((PetscObject)v,VECMPI);
191: PetscPublishAll(v);
192: return(0);
193: }
195: /*MC
196: VECMPI - VECMPI = "mpi" - The basic parallel vector
198: Options Database Keys:
199: . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
201: Level: beginner
203: .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
204: M*/
209: PetscErrorCode VecCreate_MPI(Vec vv)
210: {
214: VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);
215: return(0);
216: }
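/*
   Usage sketch (illustrative, not part of pbvec.c): a VECMPI vector can be selected either
   explicitly with VecSetType(vv,VECMPI) or from the options database with -vec_type mpi via
   VecSetFromOptions(). The global length 100 is arbitrary.
.vb
     Vec            x;
     PetscErrorCode ierr;
     ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetFromOptions(x);CHKERRQ(ierr);
     ierr = VecSet(x,0.0);CHKERRQ(ierr);
     ierr = VecDestroy(x);CHKERRQ(ierr);
.ve
*/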
221: /*@C
222: VecCreateMPIWithArray - Creates a parallel, array-style vector,
223: where the user provides the array space to store the vector values.
225: Collective on MPI_Comm
227: Input Parameters:
228: + comm - the MPI communicator to use
229: . n - local vector length, cannot be PETSC_DECIDE
230: . N - global vector length (or PETSC_DECIDE to have it calculated)
231: - array - the user provided array to store the vector values
233: Output Parameter:
234: . vv - the vector
235:
236: Notes:
237: Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
238: same type as an existing vector.
240: If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
241: at a later stage to SET the array for storing the vector values.
243: PETSc does NOT free the array when the vector is destroyed via VecDestroy().
244: The user should not free the array until the vector is destroyed.
246: Level: intermediate
248: Concepts: vectors^creating with array
250: .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
251: VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
253: @*/
254: PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
255: {
259: if (n == PETSC_DECIDE) {
260: SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
261: }
262: PetscSplitOwnership(comm,&n,&N);
263: VecCreate(comm,vv);
264: VecSetSizes(*vv,n,N);
265: VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);
266: return(0);
267: }
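/*
   Usage sketch (illustrative, not part of pbvec.c): supplying user storage of local length n.
   As the manual page above notes, PETSc neither copies nor frees this array, so it must
   outlive the vector. The sizes are arbitrary.
.vb
     PetscInt       n = 5;
     PetscScalar    array[5];
     Vec            x;
     PetscErrorCode ierr;
     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,array,&x);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecDestroy(x);CHKERRQ(ierr);
.ve
   VecSet() writes directly into array; VecDestroy() releases the vector but not array.
*/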
271: /*@
272: VecGhostGetLocalForm - Obtains the local ghosted representation of
273: a parallel vector created with VecCreateGhost().
275: Not Collective
277: Input Parameter:
278: . g - the global vector. Vector must have been obtained with either
279: VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
281: Output Parameter:
282: . l - the local (ghosted) representation
284: Notes:
285: This routine does not actually update the ghost values, but rather it
286: returns a sequential vector that includes the locations for the ghost
287: values and their current values. The returned vector and the original
288: vector passed in share the same array that contains the actual vector data.
290: One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
291: finished using the object.
293: Level: advanced
295: Concepts: vectors^ghost point access
297: .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
299: @*/
300: PetscErrorCode VecGhostGetLocalForm(Vec g,Vec *l)
301: {
303: PetscTruth isseq,ismpi;
309: PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);
310: PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);
311: if (ismpi) {
312: Vec_MPI *v = (Vec_MPI*)g->data;
313: if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
314: *l = v->localrep;
315: } else if (isseq) {
316: *l = g;
317: } else {
318: SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
319: }
320: PetscObjectReference((PetscObject)*l);
321: return(0);
322: }
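/*
   Usage sketch (illustrative, not part of pbvec.c): reading owned and ghost entries through
   the local form of a vector gx created with VecCreateGhost(). Entries 0..n-1 of the local
   form are the owned values and entries n..n+nghost-1 are the ghost values; both views share
   the same underlying array.
.vb
     Vec            lx;
     PetscScalar    *a;
     PetscInt       i,nlocal;
     PetscErrorCode ierr;
     ierr = VecGhostGetLocalForm(gx,&lx);CHKERRQ(ierr);
     ierr = VecGetLocalSize(lx,&nlocal);CHKERRQ(ierr);
     ierr = VecGetArray(lx,&a);CHKERRQ(ierr);
     for (i=0; i<nlocal; i++) a[i] *= 2.0;
     ierr = VecRestoreArray(lx,&a);CHKERRQ(ierr);
     ierr = VecGhostRestoreLocalForm(gx,&lx);CHKERRQ(ierr);
.ve
*/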
326: /*@
327: VecGhostRestoreLocalForm - Restores the local ghosted representation of
328: a parallel vector obtained with VecGhostGetLocalForm().
330: Not Collective
332: Input Parameter:
333: + g - the global vector
334: - l - the local (ghosted) representation
336: Notes:
337: This routine does not actually update the ghost values, but rather it
338: returns a sequential vector that includes the locations for the ghost values
339: and their current values.
341: Level: advanced
343: .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
344: @*/
345: PetscErrorCode VecGhostRestoreLocalForm(Vec g,Vec *l)
346: {
348: PetscObjectDereference((PetscObject)*l);
349: return(0);
350: }
354: /*@
355: VecGhostUpdateBegin - Begins the vector scatter to update the vector from
356: local representation to global or global representation to local.
358: Collective on Vec
360: Input Parameters:
361: + g - the vector (obtained with VecCreateGhost() or VecDuplicate())
362: . insertmode - one of ADD_VALUES or INSERT_VALUES
363: - scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
365: Notes:
366: Use the following to update the ghost regions with correct values from the owning process
367: .vb
368: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
369: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
370: .ve
372: Use the following to accumulate the ghost region values onto the owning processors
373: .vb
374: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
375: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
376: .ve
378: To accumulate the ghost region values onto the owning processors and then update
379: the ghost regions correctly, call the latter followed by the former, i.e.,
380: .vb
381: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
382: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
383: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
384: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
385: .ve
387: Level: advanced
389: .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
390: VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
392: @*/
393: PetscErrorCode VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
394: {
395: Vec_MPI *v;
401: v = (Vec_MPI*)g->data;
402: if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
403: if (!v->localupdate) return(0);
404:
405: if (scattermode == SCATTER_REVERSE) {
406: VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);
407: } else {
408: VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);
409: }
410: return(0);
411: }
415: /*@
416: VecGhostUpdateEnd - Ends the vector scatter to update the vector from
417: local representation to global or global representation to local.
419: Collective on Vec
421: Input Parameters:
422: + g - the vector (obtained with VecCreateGhost() or VecDuplicate())
423: . insertmode - one of ADD_VALUES or INSERT_VALUES
424: - scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
426: Notes:
428: Use the following to update the ghost regions with correct values from the owning process
429: .vb
430: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
431: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
432: .ve
434: Use the following to accumulate the ghost region values onto the owning processors
435: .vb
436: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
437: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
438: .ve
440: To accumulate the ghost region values onto the owning processors and then update
441: the ghost regions correctly, call the latter followed by the former, i.e.,
442: .vb
443: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
444: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
445: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
446: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
447: .ve
449: Level: advanced
451: .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
452: VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
454: @*/
455: PetscErrorCode VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
456: {
457: Vec_MPI *v;
463: v = (Vec_MPI*)g->data;
464: if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
465: if (!v->localupdate) return(0);
467: if (scattermode == SCATTER_REVERSE) {
468: VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);
469: } else {
470: VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);
471: }
472: return(0);
473: }
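/*
   Usage sketch (illustrative, not part of pbvec.c): a typical local-assembly cycle combining
   the two call sequences shown in the manual pages above. Contributions are added into the
   local form (including its ghost slots), accumulated onto the owning processes with a
   reverse scatter, and the refreshed ghost values are then pulled back with a forward
   scatter. gx is assumed to come from VecCreateGhost(); the added values are arbitrary.
.vb
     Vec            lx;
     PetscScalar    *a;
     PetscInt       i,nlocal;
     PetscErrorCode ierr;
     ierr = VecGhostGetLocalForm(gx,&lx);CHKERRQ(ierr);
     ierr = VecGetLocalSize(lx,&nlocal);CHKERRQ(ierr);
     ierr = VecGetArray(lx,&a);CHKERRQ(ierr);
     for (i=0; i<nlocal; i++) a[i] += 1.0;
     ierr = VecRestoreArray(lx,&a);CHKERRQ(ierr);
     ierr = VecGhostRestoreLocalForm(gx,&lx);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(gx,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(gx,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(gx,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(gx,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
.ve
*/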
477: /*@C
478: VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
479: the caller allocates the array space.
481: Collective on MPI_Comm
483: Input Parameters:
484: + comm - the MPI communicator to use
485: . n - local vector length
486: . N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
487: . nghost - number of local ghost points
488: . ghosts - global indices of ghost points (or PETSC_NULL if not needed)
489: - array - the space to store the vector values (as long as n + nghost)
491: Output Parameter:
492: . vv - the global vector representation (without ghost points as part of vector)
493:
494: Notes:
495: Use VecGhostGetLocalForm() to access the local, ghosted representation
496: of the vector.
498: This also automatically sets the ISLocalToGlobalMapping() for this vector.
500: Level: advanced
502: Concepts: vectors^creating with array
503: Concepts: vectors^ghosted
505: .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
506: VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
507: VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
509: @*/
510: PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
511: {
512: PetscErrorCode ierr;
513: Vec_MPI *w;
514: PetscScalar *larray;
515: IS from,to;
516: ISLocalToGlobalMapping ltog;
517: PetscInt rstart,i,*indices;
520: *vv = 0;
522: if (n == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
523: if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
524: if (nghost < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
525: PetscSplitOwnership(comm,&n,&N);
526: /* Create global representation */
527: VecCreate(comm,vv);
528: VecSetSizes(*vv,n,N);
529: VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);
530: w = (Vec_MPI *)(*vv)->data;
531: /* Create local representation */
532: VecGetArray(*vv,&larray);
533: VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);
534: PetscLogObjectParent(*vv,w->localrep);
535: VecRestoreArray(*vv,&larray);
537: /*
538: Create scatter context for scattering (updating) ghost values
539: */
540: ISCreateGeneral(comm,nghost,ghosts,&from);
541: ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
542: VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
543: PetscLogObjectParent(*vv,w->localupdate);
544: ISDestroy(to);
545: ISDestroy(from);
547: /* set local to global mapping for ghosted vector */
548: PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);
549: VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);
550: for (i=0; i<n; i++) {
551: indices[i] = rstart + i;
552: }
553: for (i=0; i<nghost; i++) {
554: indices[n+i] = ghosts[i];
555: }
556: ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);
557: PetscFree(indices);
558: VecSetLocalToGlobalMapping(*vv,ltog);
559: ISLocalToGlobalMappingDestroy(ltog);
561: return(0);
562: }
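/*
   Usage sketch (illustrative, not part of pbvec.c): the caller provides storage of length
   n + nghost, the owned part followed by the ghost slots; as with VecCreateMPIWithArray()
   above, the array is not freed by VecDestroy(). The sizes and ghost indices are
   hypothetical and only meaningful on more than one process.
.vb
     PetscInt       n = 4,nghost = 2,ghosts[2] = {0,7};
     PetscScalar    work[6];
     Vec            gx;
     PetscErrorCode ierr;
     ierr = VecCreateGhostWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,work,&gx);CHKERRQ(ierr);
     ierr = VecDestroy(gx);CHKERRQ(ierr);
.ve
*/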
566: /*@
567: VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
569: Collective on MPI_Comm
571: Input Parameters:
572: + comm - the MPI communicator to use
573: . n - local vector length
574: . N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
575: . nghost - number of local ghost points
576: - ghosts - global indices of ghost points
578: Output Parameter:
579: . vv - the global vector representation (without ghost points as part of vector)
580:
581: Notes:
582: Use VecGhostGetLocalForm() to access the local, ghosted representation
583: of the vector.
585: This also automatically sets the ISLocalToGlobalMapping() for this vector.
587: Level: advanced
589: Concepts: vectors^ghosted
591: .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
592: VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
593: VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
594: VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
596: @*/
597: PetscErrorCode VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
598: {
602: VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);
603: return(0);
604: }
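/*
   Usage sketch (illustrative, not part of pbvec.c): each process owns n entries and keeps
   nghost ghost copies of entries owned elsewhere; here PETSc allocates the storage of
   length n + nghost itself. The ghost indices are hypothetical global indices that would
   normally refer to entries owned by neighboring processes.
.vb
     PetscInt       n = 4,nghost = 2,ghosts[2] = {0,7};
     Vec            gx;
     PetscErrorCode ierr;
     ierr = VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,&gx);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(gx,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(gx,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecDestroy(gx);CHKERRQ(ierr);
.ve
*/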
608: PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
609: {
611: Vec_MPI *vw,*w = (Vec_MPI *)win->data;
612: PetscScalar *array;
615: VecCreate(((PetscObject)win)->comm,v);
616: VecSetSizes(*v,win->map.n,win->map.N);
617: VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);
618: vw = (Vec_MPI *)(*v)->data;
619: PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));
621: /* save local representation of the parallel vector (and scatter) if it exists */
622: if (w->localrep) {
623: VecGetArray(*v,&array);
624: VecCreateSeqWithArray(PETSC_COMM_SELF,win->map.n+w->nghost,array,&vw->localrep);
625: PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));
626: VecRestoreArray(*v,&array);
627: PetscLogObjectParent(*v,vw->localrep);
628: vw->localupdate = w->localupdate;
629: if (vw->localupdate) {
630: PetscObjectReference((PetscObject)vw->localupdate);
631: }
632: }
634: /* New vector should inherit stashing property of parent */
635: (*v)->stash.donotstash = win->stash.donotstash;
636: (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
637:
638: PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);
639: PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);
640: if (win->mapping) {
641: PetscObjectReference((PetscObject)win->mapping);
642: (*v)->mapping = win->mapping;
643: }
644: if (win->bmapping) {
645: PetscObjectReference((PetscObject)win->bmapping);
646: (*v)->bmapping = win->bmapping;
647: }
648: (*v)->map.bs = win->map.bs;
649: (*v)->bstash.bs = win->bstash.bs;
651: return(0);
652: }
654: /* ------------------------------------------------------------------------------------------*/
657: /*@C
658: VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
659: the caller allocates the array space. Indices in the ghost region are based on blocks.
661: Collective on MPI_Comm
663: Input Parameters:
664: + comm - the MPI communicator to use
665: . bs - block size
666: . n - local vector length
667: . N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
668: . nghost - number of local ghost blocks
669: . ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
670: - array - the space to store the vector values (as long as n + nghost*bs)
672: Output Parameter:
673: . vv - the global vector representation (without ghost points as part of vector)
674:
675: Notes:
676: Use VecGhostGetLocalForm() to access the local, ghosted representation
677: of the vector.
679: n is the local vector size (total local size, not the number of blocks), while nghost
680: is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
681: portion is bs*nghost
683: Level: advanced
685: Concepts: vectors^creating ghosted
686: Concepts: vectors^creating with array
688: .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
689: VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
690: VecCreateGhostWithArray(), VecCreateGhostBlocked()
692: @*/
693: PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
694: {
696: Vec_MPI *w;
697: PetscScalar *larray;
698: IS from,to;
701: *vv = 0;
703: if (n == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
704: if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
705: if (nghost < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
706: PetscSplitOwnership(comm,&n,&N);
707: /* Create global representation */
708: VecCreate(comm,vv);
709: VecSetSizes(*vv,n,N);
710: VecCreate_MPI_Private(*vv,PETSC_FALSE,nghost*bs,array);
711: VecSetBlockSize(*vv,bs);
712: w = (Vec_MPI *)(*vv)->data;
713: /* Create local representation */
714: VecGetArray(*vv,&larray);
715: VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);
716: VecSetBlockSize(w->localrep,bs);
717: PetscLogObjectParent(*vv,w->localrep);
718: VecRestoreArray(*vv,&larray);
720: /*
721: Create scatter context for scattering (updating) ghost values
722: */
723: ISCreateBlock(comm,bs,nghost,ghosts,&from);
724: ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);
725: VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
726: PetscLogObjectParent(*vv,w->localupdate);
727: ISDestroy(to);
728: ISDestroy(from);
730: return(0);
731: }
735: /*@
736: VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
737: The indexing of the ghost points is done with blocks.
739: Collective on MPI_Comm
741: Input Parameters:
742: + comm - the MPI communicator to use
743: . bs - the block size
744: . n - local vector length
745: . N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
746: . nghost - number of local ghost blocks
747: - ghosts - global indices of ghost blocks
749: Output Parameter:
750: . vv - the global vector representation (without ghost points as part of vector)
751:
752: Notes:
753: Use VecGhostGetLocalForm() to access the local, ghosted representation
754: of the vector.
756: n is the local vector size (total local size, not the number of blocks), while nghost
757: is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
758: portion is bs*nghost
760: Level: advanced
762: Concepts: vectors^ghosted
764: .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
765: VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
766: VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
768: @*/
769: PetscErrorCode VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
770: {
774: VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);
775: return(0);
776: }
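/*
   Usage sketch (illustrative, not part of pbvec.c): block-indexed ghosting. n is the total
   local length (a multiple of bs), nghost counts ghost blocks, and the entries of the ghost
   array are global block indices, so the local form holds n + bs*nghost entries. The sizes
   and the ghost block index are hypothetical.
.vb
     PetscInt       bs = 2,n = 6,nghost = 1,ghostblocks[1] = {0};
     Vec            gx;
     PetscErrorCode ierr;
     ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghostblocks,&gx);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(gx,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(gx,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecDestroy(gx);CHKERRQ(ierr);
.ve
*/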
778: /*
779: These introduce a ghosted vector where the ghosting is determined by the call to
780: VecSetLocalToGlobalMapping()
781: */
785: PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
786: {
788: Vec_MPI *v = (Vec_MPI *)vv->data;
791: v->nghost = map->n - vv->map.n;
793: /* we need to enlarge the array space that was allocated when the vector was created */
794: PetscFree(v->array_allocated);
795: PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);
796: v->array = v->array_allocated;
797:
798: /* Create local representation */
799: VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);
800: PetscLogObjectParent(vv,v->localrep);
801: return(0);
802: }
807: PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
808: {
810: Vec_MPI *v = (Vec_MPI *)vv->data;
813: VecSetValues(v->localrep,n,ix,values,mode);
814: return(0);
815: }
820: PetscErrorCode VecCreate_FETI(Vec vv)
821: {
825: VecSetType(vv,VECMPI);
826:
827: /* overwrite the functions to handle setting values locally */
828: vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
829: vv->ops->setvalueslocal = VecSetValuesLocal_FETI;
830: vv->ops->assemblybegin = 0;
831: vv->ops->assemblyend = 0;
832: vv->ops->setvaluesblocked = 0;
835: return(0);
836: }