Actual source code: pbvec.c

/*
   This file contains routines for parallel vector operations.
*/
#include "src/vec/impls/mpi/pvecimpl.h"

/*
   Note this code is very similar to VecPublish_Seq()
*/
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  return(0);
}

PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar sum,work;

  VecDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
  *z = sum;
  return(0);
}

PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar sum,work;

  VecTDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
  *z = sum;
  return(0);
}

PetscErrorCode VecSetOption_MPI(Vec v,VecOption op)
{
  if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_TRUE;
  } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
    v->stash.donotstash = PETSC_FALSE;
  }
  return(0);
}

EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  Vec_MPI *v = (Vec_MPI *)vin->data;

  v->array = (PetscScalar *)a;
  if (v->localrep) {
    VecPlaceArray(v->localrep,a);
  }
  return(0);
}

EXTERN PetscErrorCode VecLoad_Binary(PetscViewer,const VecType,Vec*);

static struct _VecOps DvOps = { VecDuplicate_MPI,
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq,
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI,
                                VecAssemblyBegin_MPI,
                                VecAssemblyEnd_MPI,
                                VecGetArray_Seq,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                VecRestoreArray_Seq,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecLoadIntoVector_Default,
                                VecReciprocal_Default,
                                0, /* VecViewNative... */
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_Seq,
                                0,
                                VecMaxPointwiseDivide_Seq,
                                VecLoad_Binary};

/*
   VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
   VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
   VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscInt nghost,const PetscScalar array[],PetscMap map)
{
  Vec_MPI     *s;
  PetscMPIInt size,rank;

  MPI_Comm_size(v->comm,&size);
  MPI_Comm_rank(v->comm,&rank);

  v->bops->publish = VecPublish_MPI;
  PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->n+nghost+1)*sizeof(PetscScalar));
  PetscNew(Vec_MPI,&s);
  PetscMemzero(s,sizeof(Vec_MPI));
  PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
  v->data        = (void*)s;
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (array) {
    s->array           = (PetscScalar *)array;
    s->array_allocated = 0;
  } else {
    PetscInt n = v->n+nghost;
    PetscMalloc(n*sizeof(PetscScalar),&s->array);
    s->array_allocated = s->array;
    PetscMemzero(s->array,v->n*sizeof(PetscScalar));
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;

  if (!v->map) {
    if (!map) {
      PetscMapCreateMPI(v->comm,v->n,v->N,&v->map);
    } else {
      v->map = map;
      PetscObjectReference((PetscObject)map);
    }
  }

  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called. */
  VecStashCreate_Private(v->comm,1,&v->stash);
  VecStashCreate_Private(v->comm,v->bs,&v->bstash);

#if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE)
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);
#endif
  PetscObjectChangeTypeName((PetscObject)v,VECMPI);
  PetscPublishAll(v);
  return(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
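
   A minimal usage sketch (the local size n is illustrative); the call to
   VecSetFromOptions() is what honors -vec_type mpi:
.vb
       Vec v;
       VecCreate(PETSC_COMM_WORLD,&v);
       VecSetSizes(v,n,PETSC_DECIDE);
       VecSetFromOptions(v);
.ve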

   Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/

PetscErrorCode VecCreate_MPI(Vec vv)
{
  if (vv->bs > 0) {
    PetscSplitOwnershipBlock(vv->comm,vv->bs,&vv->n,&vv->N);
  } else {
    PetscSplitOwnership(vv->comm,&vv->n,&vv->N);
  }
  VecCreate_MPI_Private(vv,0,0,PETSC_NULL);
  return(0);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.
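
   A minimal sketch of the intended usage (the local size n is illustrative and
   error checking is omitted); the array is freed by the caller, and only after
   the vector has been destroyed:
.vb
       PetscScalar *a;
       Vec         v;
       PetscMalloc(n*sizeof(PetscScalar),&a);
       VecCreateMPIWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,a,&v);
       ... work with v ...
       VecDestroy(v);
       PetscFree(a);
.ve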

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/

PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  if (n == PETSC_DECIDE) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  }
  PetscSplitOwnership(comm,&n,&N);
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,0,array,PETSC_NULL);
  return(0);
}

/*@C
   VecGhostGetLocalForm - Obtains the local ghosted representation of
   a parallel vector created with VecCreateGhost().

   Not Collective

   Input Parameter:
.  g - the global vector. The vector must have been obtained with either
       VecCreateGhost(), VecCreateGhostWithArray(), or VecCreateSeq().

   Output Parameter:
.  l - the local (ghosted) representation

   Notes:
   This routine does not actually update the ghost values, but rather it
   returns a sequential vector that includes the locations for the ghost
   values and their current values. The returned vector and the original
   vector passed in share the same array that contains the actual vector data.

   One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
   finished using the object.
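
   A sketch of typical usage, assuming g was created with VecCreateGhost() (error
   checking omitted); the local form exposes the owned entries followed by the
   ghost entries:
.vb
       Vec         lv;
       PetscScalar *x;
       VecGhostGetLocalForm(g,&lv);
       VecGetArray(lv,&x);
       ... read or write x[0] through x[n+nghost-1] ...
       VecRestoreArray(lv,&x);
       VecGhostRestoreLocalForm(g,&lv);
.ve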

   Level: advanced

   Concepts: vectors^ghost point access

.seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/

PetscErrorCode VecGhostGetLocalForm(Vec g,Vec *l)
{
  PetscTruth isseq,ismpi;

  PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);
  PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);
  if (ismpi) {
    Vec_MPI *v = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    *l = g;
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",g->type_name);
  }
  PetscObjectReference((PetscObject)*l);
  return(0);
}

/*@C
   VecGhostRestoreLocalForm - Restores the local ghosted representation of
   a parallel vector obtained with VecGhostGetLocalForm().

   Not Collective

   Input Parameters:
+  g - the global vector
-  l - the local (ghosted) representation

   Notes:
   This routine does not actually update the ghost values; it simply indicates
   that one is finished with the local form obtained with VecGhostGetLocalForm().

   Level: advanced

.seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
@*/

PetscErrorCode VecGhostRestoreLocalForm(Vec g,Vec *l)
{
  PetscObjectDereference((PetscObject)*l);
  return(0);
}

/*@
   VecGhostUpdateBegin - Begins the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g           - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode  - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/

PetscErrorCode VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI *v;

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);
  } else {
    VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);
  }
  return(0);
}

/*@
   VecGhostUpdateEnd - Ends the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g           - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode  - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/

PetscErrorCode VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI *v;

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);
  } else {
    VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);
  }
  return(0);
}

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array  - the space to store the vector values (of length at least n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.
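
   A minimal sketch of the intended usage (the sizes and ghost indices are
   illustrative and error checking is omitted); the caller supplies storage for
   the owned entries plus the ghost entries:
.vb
       PetscInt    nghost = 2,ghosts[2] = {0,5};
       PetscScalar *a;
       Vec         v;
       PetscMalloc((n+nghost)*sizeof(PetscScalar),&a);
       VecCreateGhostWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,a,&v);
.ve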

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/

PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  Vec_MPI     *w;
  PetscScalar *larray;
  IS          from,to;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  PetscSplitOwnership(comm,&n,&N);

  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,nghost,array,PETSC_NULL);
  w = (Vec_MPI *)(*vv)->data;

  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /*
     Create scatter context for scattering (updating) ghost values
  */
  ISCreateGeneral(comm,nghost,ghosts,&from);
  ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(to);
  ISDestroy(from);

  return(0);
}

/*@C
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.
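
   A minimal sketch of typical usage (the sizes and ghost indices are illustrative
   and error checking is omitted); after the owned entries have been set, a forward
   scatter fills the ghost entries:
.vb
       PetscInt ghosts[2] = {0,5};
       Vec      v;
       VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,2,ghosts,&v);
       ... set the owned entries of v ...
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve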

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/

PetscErrorCode VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);
  return(0);
}

PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  Vec_MPI     *vw,*w = (Vec_MPI *)win->data;
  PetscScalar *array;

  VecCreate(win->comm,v);
  VecSetSizes(*v,win->n,win->N);
  VecCreate_MPI_Private(*v,w->nghost,0,win->map);
  vw = (Vec_MPI *)(*v)->data;
  PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    VecGetArray(*v,&array);
    VecCreateSeqWithArray(PETSC_COMM_SELF,win->n+w->nghost,array,&vw->localrep);
    PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));
    VecRestoreArray(*v,&array);
    PetscLogObjectParent(*v,vw->localrep);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      PetscObjectReference((PetscObject)vw->localupdate);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;

  PetscOListDuplicate(win->olist,&(*v)->olist);
  PetscFListDuplicate(win->qlist,&(*v)->qlist);
  if (win->mapping) {
    (*v)->mapping = win->mapping;
    PetscObjectReference((PetscObject)win->mapping);
  }
  if (win->bmapping) {
    (*v)->bmapping = win->bmapping;
    PetscObjectReference((PetscObject)win->bmapping);
  }
  (*v)->bs        = win->bs;
  (*v)->bstash.bs = win->bstash.bs;

  return(0);
}

/* ------------------------------------------------------------------------------------------*/

/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array  - the space to store the vector values (of length at least n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks), while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost.
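
   For example (illustrative numbers only): with bs = 2, local length n = 10, and
   nghost = 3 ghost blocks, the supplied array must hold n + bs*nghost = 10 + 2*3 = 16
   PetscScalar entries.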

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/

PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  Vec_MPI     *w;
  PetscScalar *larray;
  IS          from,to;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  PetscSplitOwnership(comm,&n,&N);

  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,nghost*bs,array,PETSC_NULL);
  VecSetBlockSize(*vv,bs);
  w = (Vec_MPI *)(*vv)->data;

  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);
  VecSetBlockSize(w->localrep,bs);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /*
     Create scatter context for scattering (updating) ghost values
  */
  ISCreateBlock(comm,bs,nghost,ghosts,&from);
  ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(to);
  ISDestroy(from);

  return(0);
}

/*@C
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
   The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - the block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks), while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost.
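
   A minimal sketch of typical usage (block size, sizes, and ghost block indices are
   illustrative, with n assumed to be a multiple of bs; error checking is omitted);
   the local array then holds n + bs*nghost entries, the last bs*nghost of which are
   the ghost values:
.vb
       PetscInt ghosts[2] = {0,4};
       Vec      v;
       VecCreateGhostBlock(PETSC_COMM_WORLD,3,n,PETSC_DECIDE,2,ghosts,&v);
.ve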

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/

PetscErrorCode VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);
  return(0);
}

/*
   These introduce a ghosted vector where the ghosting is determined by the call to
   VecSetLocalToGlobalMapping()
*/

PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  Vec_MPI *v = (Vec_MPI *)vv->data;

  v->nghost = map->n - vv->n;

  /* we need to enlarge the array space that was allocated when the vector was created */
  PetscFree(v->array_allocated);
  PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);
  v->array = v->array_allocated;

  /* Create local representation */
  VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);
  PetscLogObjectParent(vv,v->localrep);

  return(0);
}

PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
{
  Vec_MPI *v = (Vec_MPI *)vv->data;

  VecSetValues(v->localrep,n,ix,values,mode);
  return(0);
}

PetscErrorCode VecCreate_FETI(Vec vv)
{
  VecSetType(vv,VECMPI);

  /* overwrite the functions to handle setting values locally */
  vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
  vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
  vv->ops->assemblybegin           = 0;
  vv->ops->assemblyend             = 0;
  vv->ops->setvaluesblocked        = 0;

  return(0);
}