Actual source code: pbvec.c
/*$Id: pbvec.c,v 1.166 2001/04/10 19:34:57 bsmith Exp $*/

/*
   This file contains routines for parallel vector operations.
 */
#include "src/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/

/*
   Note this code is very similar to VecPublish_Seq()
*/
static int VecPublish_MPI(PetscObject obj)
{
#if defined(PETSC_HAVE_AMS)
  Vec     v = (Vec) obj;
  Vec_MPI *s = (Vec_MPI*)v->data;
  int     ierr,(*f)(AMS_Memory,char *,Vec);
#endif

#if defined(PETSC_HAVE_AMS)
  /* if it is already published then return */
  if (v->amem >= 0) return(0);

  ierr = PetscObjectPublishBaseBegin(obj);CHKERRQ(ierr);
  ierr = AMS_Memory_add_field((AMS_Memory)v->amem,"values",s->array,v->n,AMS_DOUBLE,AMS_READ,
                                AMS_DISTRIBUTED,AMS_REDUCT_UNDEF);CHKERRQ(ierr);

  /*
     If the vector knows its "layout" let it set it, otherwise it defaults
     to a correct 1d distribution
  */
  ierr = PetscObjectQueryFunction(obj,"AMSSetFieldBlock_C",(void (**)())&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)((AMS_Memory)v->amem,"values",v);CHKERRQ(ierr);
  }
  ierr = PetscObjectPublishBaseEnd(obj);CHKERRQ(ierr);
#endif
  return(0);
}

int VecDot_MPI(Vec xin,Vec yin,Scalar *z)
{
  Scalar sum,work;
  int    ierr;

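  /* compute the local part of the dot product, then sum the contributions from every process */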
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
  *z   = sum;
  return(0);
}

int VecTDot_MPI(Vec xin,Vec yin,Scalar *z)
{
  Scalar sum,work;
  int    ierr;

  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);CHKERRQ(ierr);
  *z   = sum;
  return(0);
}

int VecSetOption_MPI(Vec v,VecOption op)
{
  Vec_MPI *w = (Vec_MPI*)v->data;

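  /* with VEC_IGNORE_OFF_PROC_ENTRIES set, VecSetValues() drops entries destined
     for another process instead of stashing and communicating them */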
  if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
    w->donotstash = PETSC_TRUE;
  }
  return(0);
}

EXTERN int VecDuplicate_MPI(Vec,Vec *);
EXTERN_C_BEGIN
EXTERN int VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

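/* Parallel operations table: operations that require communication get _MPI
   implementations; purely local operations reuse the sequential _Seq kernels. */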
static struct _VecOps DvOps = { VecDuplicate_MPI,
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq,
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI,
                                VecAssemblyBegin_MPI,
                                VecAssemblyEnd_MPI,
                                VecGetArray_Seq,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                VecGetOwnershipRange_MPI,
                                VecRestoreArray_Seq,
                                VecMax_MPI,VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_Seq,
                                VecReplaceArray_Seq,
                                VecGetMap_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecLoadIntoVector_Default,
                                VecReciprocal_Default,
                                0, /* VecViewNative... */
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_Seq};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
*/
int VecCreate_MPI_Private(Vec v,int nghost,const Scalar array[],Map map)
{
  Vec_MPI *s;
  int     ierr,size,rank;

  ierr = MPI_Comm_size(v->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(v->comm,&rank);CHKERRQ(ierr);

  v->bops->publish = VecPublish_MPI;
  PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->n+nghost+1)*sizeof(Scalar));
  ierr        = PetscMalloc(sizeof(Vec_MPI),&s);CHKERRQ(ierr);
  ierr        = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  v->data     = (void*)s;
  s->nghost   = nghost;
  v->mapping  = 0;
  v->bmapping = 0;
  v->bs       = -1;
  s->size     = size;
  s->rank     = rank;
  s->browners = 0;
  if (array) {
    s->array           = (Scalar *)array;
    s->array_allocated = 0;
  } else {
    ierr               = PetscMalloc((v->n+nghost+1)*sizeof(Scalar),&s->array);CHKERRQ(ierr);
    s->array_allocated = s->array;
    ierr               = PetscMemzero(s->array,v->n*sizeof(Scalar));CHKERRQ(ierr);
  }

  /* By default parallel vectors do not have a local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  s->insertmode = NOT_SET_VALUES;

  if (!v->map) {
    if (!map) {
      ierr = MapCreateMPI(v->comm,v->n,v->N,&v->map);CHKERRQ(ierr);
    } else {
      v->map = map;
      ierr   = PetscObjectReference((PetscObject)map);CHKERRQ(ierr);
    }
  }
  /* create the stashes. The block size for the bstash is set later when
     VecSetValuesBlocked() is called.
  */
  ierr = VecStashCreate_Private(v->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(v->comm,1,&v->bstash);CHKERRQ(ierr);
  s->donotstash = PETSC_FALSE;

#if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VEC_MPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  return(0);
}

EXTERN_C_BEGIN
int VecCreate_MPI(Vec vv)
{
  int ierr;

  ierr = PetscSplitOwnership(vv->comm,&vv->n,&vv->N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(vv,0,0,PETSC_NULL);CHKERRQ(ierr);
  return(0);
}
EXTERN_C_END

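/*
   Editor's note: a minimal usage sketch added for illustration; it is not part of
   the original file and the function name is hypothetical. It shows the standard
   create/set/assemble cycle for a VEC_MPI vector created via VecCreateMPI().
*/
int VecCreateMPIUsageSketch(MPI_Comm comm)
{
  Vec    x;
  Scalar one = 1.0;
  int    ierr,i,low,high;

  /* 100 global entries; PETSC_DECIDE lets PETSc split them among the processes */
  ierr = VecCreateMPI(comm,PETSC_DECIDE,100,&x);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(x,&low,&high);CHKERRQ(ierr);
  /* set the locally owned entries; off-process entries would be stashed and
     communicated during assembly */
  for (i=low; i<high; i++) {
    ierr = VecSetValues(x,1,&i,&one,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
  ierr = VecDestroy(x);CHKERRQ(ierr);
  return(0);
}
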
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user-provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
int VecCreateMPIWithArray(MPI_Comm comm,int n,int N,const Scalar array[],Vec *vv)
{
  int ierr;

  if (n == PETSC_DECIDE) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  }
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,n,N,vv);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,0,array,PETSC_NULL);CHKERRQ(ierr);
  return(0);
}

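/*
   Editor's note: a minimal usage sketch added for illustration; it is not part of
   the original file and the function name is hypothetical. The key point is that
   the caller owns the array and must not free it before the vector is destroyed.
*/
int VecCreateMPIWithArrayUsageSketch(MPI_Comm comm)
{
  Vec    x;
  Scalar *a;
  int    ierr,n = 5;

  /* user-provided storage for the n locally owned entries */
  ierr = PetscMalloc(n*sizeof(Scalar),&a);CHKERRQ(ierr);
  ierr = PetscMemzero(a,n*sizeof(Scalar));CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(comm,n,PETSC_DECIDE,a,&x);CHKERRQ(ierr);
  /* ... use x; its entries live directly in a ... */
  ierr = VecDestroy(x);CHKERRQ(ierr);
  /* VecDestroy() does not free a user-provided array */
  ierr = PetscFree(a);CHKERRQ(ierr);
  return(0);
}
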
/*@C
    VecGhostGetLocalForm - Obtains the local ghosted representation of
    a parallel vector created with VecCreateGhost().

    Not Collective

    Input Parameter:
.   g - the global vector. Must have been obtained with either
        VecCreateGhost(), VecCreateGhostWithArray(), or VecCreateSeq().

    Output Parameter:
.   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values, but rather it
    returns a sequential vector that includes the locations for the ghost
    values and their current values. The returned vector and the original
    vector passed in share the same array that contains the actual vector data.

    One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
    finished using the object.

    Level: advanced

   Concepts: vectors^ghost point access

.seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
int VecGhostGetLocalForm(Vec g,Vec *l)
{
  int        ierr;
  PetscTruth isseq,ismpi;

  ierr = PetscTypeCompare((PetscObject)g,VEC_SEQ,&isseq);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)g,VEC_MPI,&ismpi);CHKERRQ(ierr);
  if (ismpi) {
    Vec_MPI *v = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    *l = g;
  } else {
    SETERRQ1(1,"Vector type %s does not have local representation",g->type_name);
  }
  ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
  return(0);
}

/*@C
    VecGhostRestoreLocalForm - Restores the local ghosted representation of
    a parallel vector obtained with VecGhostGetLocalForm().

    Not Collective

    Input Parameters:
+   g - the global vector
-   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values; it simply gives back
    the local (ghosted) representation obtained with VecGhostGetLocalForm().

    Level: advanced

.seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
@*/
int VecGhostRestoreLocalForm(Vec g,Vec *l)
{
  int ierr;

  ierr = PetscObjectDereference((PetscObject)*l);CHKERRQ(ierr);
  return(0);
}

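/*
   Editor's note: a usage sketch added for illustration; it is not part of the
   original file and the function name is hypothetical. It reads both owned and
   ghost entries of g (assumed to come from VecCreateGhost()) through one local
   index space.
*/
int VecGhostLocalFormUsageSketch(Vec g,Scalar *sum)
{
  Vec    l;
  Scalar *a;
  int    ierr,i,n;

  ierr = VecGhostGetLocalForm(g,&l);CHKERRQ(ierr);
  ierr = VecGetLocalSize(l,&n);CHKERRQ(ierr);   /* n counts owned entries plus ghosts */
  ierr = VecGetArray(l,&a);CHKERRQ(ierr);
  *sum = 0.0;
  for (i=0; i<n; i++) *sum += a[i];             /* owned entries come first, then ghosts */
  ierr = VecRestoreArray(l,&a);CHKERRQ(ierr);
  ierr = VecGhostRestoreLocalForm(g,&l);CHKERRQ(ierr);
  return(0);
}
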
/*@
   VecGhostUpdateBegin - Begins the vector scatter to update the vector from
   local representation to global or from global representation to local.

   Collective on Vec

   Input Parameters:
+  g           - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode  - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
int VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI *v;
  int     ierr;

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  } else {
    ierr = VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  }
  return(0);
}

/*@
   VecGhostUpdateEnd - Ends the vector scatter to update the vector from
   local representation to global or from global representation to local.

   Collective on Vec

   Input Parameters:
+  g           - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode  - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:

   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
int VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI *v;
  int     ierr;

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    ierr = VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  } else {
    ierr = VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);CHKERRQ(ierr);
  }
  return(0);
}

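/*
   Editor's note: a usage sketch added for illustration; it is not part of the
   original file and the function name is hypothetical. It performs the
   accumulate-then-refresh cycle described in the notes above.
*/
int VecGhostUpdateUsageSketch(Vec v)
{
  int ierr;

  /* add each process's ghost-region contributions into the owning process */
  ierr = VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  /* then refresh the ghost regions with the now-correct owned values */
  ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  return(0);
}
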
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array  - the space to store the vector values (of length at least n+nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray()

@*/
int VecCreateGhostWithArray(MPI_Comm comm,int n,int N,int nghost,const int ghosts[],const Scalar array[],Vec *vv)
{
  int     ierr;
  Vec_MPI *w;
  Scalar  *larray;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,n,N,vv);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost,array,PETSC_NULL);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  PetscLogObjectParent(*vv,w->localrep);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  if (ghosts) {
    IS from,to;

    ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    PetscLogObjectParent(*vv,w->localupdate);
    ierr = ISDestroy(to);CHKERRQ(ierr);
    ierr = ISDestroy(from);CHKERRQ(ierr);
  }

  return(0);
}

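/*
   Editor's note: a usage sketch added for illustration; it is not part of the
   original file, and the function name, sizes, and ghost indices are hypothetical.
   The caller supplies storage sized n+nghost so the ghost slots live at the end
   of the local array.
*/
int VecCreateGhostWithArrayUsageSketch(MPI_Comm comm)
{
  Vec    x;
  Scalar *a;
  int    ierr,n = 4,nghost = 2;
  int    ghosts[2] = {0,1};   /* global indices to mirror locally; in a real code
                                 these would typically be owned by other processes */

  ierr = PetscMalloc((n+nghost)*sizeof(Scalar),&a);CHKERRQ(ierr);
  ierr = VecCreateGhostWithArray(comm,n,PETSC_DECIDE,nghost,ghosts,a,&x);CHKERRQ(ierr);
  /* ... fill, assemble, and VecGhostUpdateBegin/End() as needed ... */
  ierr = VecDestroy(x);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);   /* user array is not freed by VecDestroy() */
  return(0);
}
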
/*@C
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd()

@*/
int VecCreateGhost(MPI_Comm comm,int n,int N,int nghost,const int ghosts[],Vec *vv)
{
  int ierr;

  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  return(0);
}

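/*
   Editor's note: a usage sketch added for illustration; it is not part of the
   original file and the function name is hypothetical. Each process ghosts the
   first entry of the next process (periodically), then refreshes it from its owner.
*/
int VecCreateGhostUsageSketch(MPI_Comm comm)
{
  Vec x;
  int ierr,rank,size,n = 4,ghosts[1];

  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  /* global index of the first entry owned by the neighboring process */
  ghosts[0] = ((rank+1) % size)*n;
  ierr = VecCreateGhost(comm,n,PETSC_DECIDE,1,ghosts,&x);CHKERRQ(ierr);
  /* refresh the ghost value with the owner's current value */
  ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecDestroy(x);CHKERRQ(ierr);
  return(0);
}
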
int VecDuplicate_MPI(Vec win,Vec *v)
{
  int     ierr;
  Vec_MPI *vw,*w = (Vec_MPI *)win->data;
  Scalar  *array;
#if defined(PETSC_HAVE_AMS)
  int     (*f)(AMS_Memory,char *,Vec);
#endif

  ierr = VecCreate(win->comm,win->n,win->N,v);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*v,w->nghost,0,win->map);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    PetscLogObjectParent(*v,vw->localrep);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  vw->donotstash = w->donotstash;

  ierr = PetscOListDuplicate(win->olist,&(*v)->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(win->qlist,&(*v)->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    (*v)->mapping = win->mapping;
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
  }
  if (win->bmapping) {
    (*v)->bmapping = win->bmapping;
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
  }
  (*v)->bs        = win->bs;
  (*v)->bstash.bs = win->bstash.bs;

#if defined(PETSC_HAVE_AMS)
  /*
     If the vector knows its "layout" let it set it, otherwise it defaults
     to a correct 1d distribution
  */
  ierr = PetscObjectQueryFunction((PetscObject)(*v),"AMSSetFieldBlock_C",(void (**)())&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)((AMS_Memory)(*v)->amem,"values",*v);CHKERRQ(ierr);
  }
#endif
  return(0);
}

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array  - the space to store the vector values (of length at least n+nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks), while
   nghost is the number of blocks in the ghost portion, i.e. the number of elements
   in the ghost portion is bs*nghost.

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
int VecCreateGhostBlockWithArray(MPI_Comm comm,int bs,int n,int N,int nghost,const int ghosts[],const Scalar array[],Vec *vv)
{
  int     ierr;
  Vec_MPI *w;
  Scalar  *larray;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,n,N,vv);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,nghost*bs,array,PETSC_NULL);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  PetscLogObjectParent(*vv,w->localrep);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  if (ghosts) {
    IS from,to;

    ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    PetscLogObjectParent(*vv,w->localupdate);
    ierr = ISDestroy(to);CHKERRQ(ierr);
    ierr = ISDestroy(from);CHKERRQ(ierr);
  }

  return(0);
}

/*@C
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
   The indexing of the ghost points is done by blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - the block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks), while
   nghost is the number of blocks in the ghost portion, i.e. the number of elements
   in the ghost portion is bs*nghost.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
int VecCreateGhostBlock(MPI_Comm comm,int bs,int n,int N,int nghost,const int ghosts[],Vec *vv)
{
  int ierr;

  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  return(0);
}

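/*
   Editor's note: a usage sketch added for illustration; it is not part of the
   original file, and the function name, sizes, and ghost index are hypothetical.
   Note that n counts scalar entries while nghost counts blocks.
*/
int VecCreateGhostBlockUsageSketch(MPI_Comm comm)
{
  Vec x;
  int ierr,bs = 2,n = 6,nghost = 1;
  int ghosts[1] = {0};   /* global index of the ghosted block */

  /* the local part holds n = 6 scalars (3 blocks of size 2); the ghost
     region adds bs*nghost = 2 more scalars at the end */
  ierr = VecCreateGhostBlock(comm,bs,n,PETSC_DECIDE,nghost,ghosts,&x);CHKERRQ(ierr);
  ierr = VecDestroy(x);CHKERRQ(ierr);
  return(0);
}
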
/*
    These introduce a ghosted vector where the ghosting is determined by the call to
    VecSetLocalToGlobalMapping()
*/
int VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  int     ierr;
  Vec_MPI *v = (Vec_MPI *)vv->data;

  v->nghost = map->n - vv->n;

  /* we need to enlarge the array space that was allocated when the vector was created */
  ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr     = PetscMalloc(map->n*sizeof(Scalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  PetscLogObjectParent(vv,v->localrep);

  return(0);
}

int VecSetValuesLocal_FETI(Vec vv,int n,const int *ix,const Scalar *values,InsertMode mode)
{
  int     ierr;
  Vec_MPI *v = (Vec_MPI *)vv->data;

  ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
  return(0);
}

EXTERN_C_BEGIN
int VecCreate_FETI(Vec vv)
{
  int ierr;

  ierr = VecSetType(vv,VEC_MPI);CHKERRQ(ierr);

  /* overwrite the functions to handle setting values locally */
  vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
  vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
  vv->ops->assemblybegin           = 0;
  vv->ops->assemblyend             = 0;
  vv->ops->setvaluesblocked        = 0;

  return(0);
}
EXTERN_C_END