Actual source code: pbvec.c
1: /*$Id: pbvec.c,v 1.173 2001/09/12 03:26:59 bsmith Exp $*/
3: /*
4: This file contains routines for Parallel vector operations.
5: */
6: #include "src/vec/impls/mpi/pvecimpl.h"
8: /*
9: Note this code is very similar to VecPublish_Seq()
10: */
13: static int VecPublish_MPI(PetscObject obj)
14: {
15: #if defined(PETSC_HAVE_AMS)
16: Vec v = (Vec) obj;
17: Vec_MPI *s = (Vec_MPI*)v->data;
18: int ierr,(*f)(AMS_Memory,char *,Vec);
19: #endif
22: #if defined(PETSC_HAVE_AMS)
23: /* if it is already published then return */
24: if (v->amem >=0) return(0);
26: PetscObjectPublishBaseBegin(obj);
27: AMS_Memory_add_field((AMS_Memory)v->amem,"values",s->array,v->n,AMS_DOUBLE,AMS_READ,
28: AMS_DISTRIBUTED,AMS_REDUCT_UNDEF);
30: /*
31: If the vector knows its "layout", let it set it; otherwise it defaults
32: to the correct 1d distribution
33: */
34: PetscObjectQueryFunction(obj,"AMSSetFieldBlock_C",(void (**)(void))&f);
35: if (f) {
36: (*f)((AMS_Memory)v->amem,"values",v);
37: }
38: PetscObjectPublishBaseEnd(obj);
39: #endif
40: return(0);
41: }
45: int VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
46: {
47: PetscScalar sum,work;
48: int ierr;
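/* compute the local dot product, then sum the per-process results over the communicator */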
51: VecDot_Seq(xin,yin,&work);
52: MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
53: *z = sum;
54: return(0);
55: }
59: int VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
60: {
61: PetscScalar sum,work;
62: int ierr;
65: VecTDot_Seq(xin,yin,&work);
66: MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
67: *z = sum;
68: return(0);
69: }
73: int VecSetOption_MPI(Vec v,VecOption op)
74: {
76: if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
77: v->stash.donotstash = PETSC_TRUE;
78: } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
79: v->stash.donotstash = PETSC_FALSE;
80: }
81: return(0);
82: }
83:
84: EXTERN int VecDuplicate_MPI(Vec,Vec *);
85: EXTERN_C_BEGIN
86: EXTERN int VecView_MPI_Draw(Vec,PetscViewer);
87: EXTERN_C_END
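/*
   The parallel vector operations table: operations whose result depends on data from all
   processes (dot products, norms, max/min) or that manage off-process entries (set values,
   assembly, view) use the _MPI implementations, while purely local operations reuse the
   sequential (_Seq) kernels applied to each process's local array.
*/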
89: static struct _VecOps DvOps = { VecDuplicate_MPI,
90: VecDuplicateVecs_Default,
91: VecDestroyVecs_Default,
92: VecDot_MPI,
93: VecMDot_MPI,
94: VecNorm_MPI,
95: VecTDot_MPI,
96: VecMTDot_MPI,
97: VecScale_Seq,
98: VecCopy_Seq,
99: VecSet_Seq,
100: VecSwap_Seq,
101: VecAXPY_Seq,
102: VecAXPBY_Seq,
103: VecMAXPY_Seq,
104: VecAYPX_Seq,
105: VecWAXPY_Seq,
106: VecPointwiseMult_Seq,
107: VecPointwiseDivide_Seq,
108: VecSetValues_MPI,
109: VecAssemblyBegin_MPI,
110: VecAssemblyEnd_MPI,
111: VecGetArray_Seq,
112: VecGetSize_MPI,
113: VecGetSize_Seq,
114: VecRestoreArray_Seq,
115: VecMax_MPI,
116: VecMin_MPI,
117: VecSetRandom_Seq,
118: VecSetOption_MPI,
119: VecSetValuesBlocked_MPI,
120: VecDestroy_MPI,
121: VecView_MPI,
122: VecPlaceArray_Seq,
123: VecReplaceArray_Seq,
124: VecDot_Seq,
125: VecTDot_Seq,
126: VecNorm_Seq,
127: VecLoadIntoVector_Default,
128: VecReciprocal_Default,
129: 0, /* VecViewNative... */
130: VecConjugate_Seq,
131: 0,
132: 0,
133: VecResetArray_Seq,
134: 0,
135: VecMaxPointwiseDivide_Seq};
139: /*
140: VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
141: VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
142: VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
143: */
144: int VecCreate_MPI_Private(Vec v,int nghost,const PetscScalar array[],PetscMap map)
145: {
146: Vec_MPI *s;
147: int ierr,size,rank;
150: MPI_Comm_size(v->comm,&size);
151: MPI_Comm_rank(v->comm,&rank);
153: v->bops->publish = VecPublish_MPI;
154: PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->n+nghost+1)*sizeof(PetscScalar));
155: PetscNew(Vec_MPI,&s);
156: PetscMemzero(s,sizeof(Vec_MPI));
157: PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
158: v->data = (void*)s;
159: s->nghost = nghost;
160: v->mapping = 0;
161: v->bmapping = 0;
162: v->petscnative = PETSC_TRUE;
164: if (array) {
165: s->array = (PetscScalar *)array;
166: s->array_allocated = 0;
167: } else {
168: int n = ((v->n+nghost) > 0) ? v->n+nghost : 1;
169: PetscMalloc(n*sizeof(PetscScalar),&s->array);
170: s->array_allocated = s->array;
171: PetscMemzero(s->array,v->n*sizeof(PetscScalar));
172: }
174: /* By default parallel vectors do not have a local representation */
175: s->localrep = 0;
176: s->localupdate = 0;
178: v->stash.insertmode = NOT_SET_VALUES;
180: if (!v->map) {
181: if (!map) {
182: PetscMapCreateMPI(v->comm,v->n,v->N,&v->map);
183: } else {
184: v->map = map;
185: PetscObjectReference((PetscObject)map);
186: }
187: }
188: /* create the stashes. The block-size for bstash is set later when
189: VecSetValuesBlocked is called.
190: */
191: VecStashCreate_Private(v->comm,1,&v->stash);
192: VecStashCreate_Private(v->comm,v->bs,&v->bstash);
193:
194: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE)
195: PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);
196: PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);
197: #endif
198: PetscObjectChangeTypeName((PetscObject)v,VECMPI);
199: PetscPublishAll(v);
200: return(0);
201: }
203: EXTERN_C_BEGIN
206: int VecCreate_MPI(Vec vv)
207: {
211: if (vv->bs > 0) {
212: PetscSplitOwnershipBlock(vv->comm,vv->bs,&vv->n,&vv->N);
213: } else {
214: PetscSplitOwnership(vv->comm,&vv->n,&vv->N);
215: }
216: VecCreate_MPI_Private(vv,0,0,PETSC_NULL);
217: VecSetSerializeType(vv,VEC_SER_MPI_BINARY);
218: return(0);
219: }
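/*
   VecSerialize_MPI - When store is PETSC_TRUE this writes the global length, local length,
   ghost count, and local array of the vector to the binary viewer; otherwise it reads them
   back, checks that the local lengths sum to the global length, and rebuilds the vector.
*/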
223: int VecSerialize_MPI(MPI_Comm comm, Vec *vec, PetscViewer viewer, PetscTruth store)
224: {
225: Vec v;
226: Vec_MPI *x;
227: PetscScalar *array;
228: int fd;
229: int vars, locVars, ghostVars;
230: int size;
231: int ierr;
234: PetscViewerBinaryGetDescriptor(viewer, &fd);
235: if (store) {
236: v = *vec;
237: x = (Vec_MPI *) v->data;
238: PetscBinaryWrite(fd, &v->N, 1, PETSC_INT, 0);
239: PetscBinaryWrite(fd, &v->n, 1, PETSC_INT, 0);
240: PetscBinaryWrite(fd, &x->nghost, 1, PETSC_INT, 0);
241: PetscBinaryWrite(fd, x->array, v->n + x->nghost, PETSC_SCALAR, 0);
242: } else {
243: PetscBinaryRead(fd, &vars, 1, PETSC_INT);
244: PetscBinaryRead(fd, &locVars, 1, PETSC_INT);
245: PetscBinaryRead(fd, &ghostVars, 1, PETSC_INT);
246: MPI_Allreduce(&locVars, &size, 1, MPI_INT, MPI_SUM, comm);
247: if (size != vars) SETERRQ(PETSC_ERR_ARG_CORRUPT, "Invalid row partition");
248: VecCreate(comm, &v);
249: VecSetSizes(v, locVars, vars);
250: if (locVars + ghostVars > 0) {
251: PetscMalloc((locVars + ghostVars) * sizeof(PetscScalar), &array);
252: PetscBinaryRead(fd, array, locVars + ghostVars, PETSC_SCALAR);
253: VecCreate_MPI_Private(v, ghostVars, array, PETSC_NULL);
254: ((Vec_MPI *) v->data)->array_allocated = array;
255: } else {
256: VecCreate_MPI_Private(v, ghostVars, PETSC_NULL, PETSC_NULL);
257: }
259: VecAssemblyBegin(v);
260: VecAssemblyEnd(v);
261: *vec = v;
262: }
264: return(0);
265: }
266: EXTERN_C_END
270: /*@C
271: VecCreateMPIWithArray - Creates a parallel, array-style vector,
272: where the user provides the array space to store the vector values.
274: Collective on MPI_Comm
276: Input Parameters:
277: + comm - the MPI communicator to use
278: . n - local vector length, cannot be PETSC_DECIDE
279: . N - global vector length (or PETSC_DECIDE to have it calculated)
280: - array - the user provided array to store the vector values
282: Output Parameter:
283: . vv - the vector
284:
285: Notes:
286: Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
287: same type as an existing vector.
289: If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
290: at a later stage to SET the array for storing the vector values.
292: PETSc does NOT free the array when the vector is destroyed via VecDestroy().
293: The user should not free the array until the vector is destroyed.
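   A minimal usage sketch (the local length and the user-allocated buffer shown
   here are illustrative only):
.vb
      PetscScalar *buffer;
      Vec         x;
      int         nlocal = 100;

      PetscMalloc(nlocal*sizeof(PetscScalar),&buffer);
      VecCreateMPIWithArray(PETSC_COMM_WORLD,nlocal,PETSC_DECIDE,buffer,&x);
         ... use x ...
      VecDestroy(x);
      PetscFree(buffer);
.ve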
295: Level: intermediate
297: Concepts: vectors^creating with array
299: .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
300: VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
302: @*/
303: int VecCreateMPIWithArray(MPI_Comm comm,int n,int N,const PetscScalar array[],Vec *vv)
304: {
308: if (n == PETSC_DECIDE) {
309: SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
310: }
311: PetscSplitOwnership(comm,&n,&N);
312: VecCreate(comm,vv);
313: VecSetSizes(*vv,n,N);
314: VecCreate_MPI_Private(*vv,0,array,PETSC_NULL);
315: return(0);
316: }
320: /*@C
321: VecGhostGetLocalForm - Obtains the local ghosted representation of
322: a parallel vector created with VecCreateGhost().
324: Not Collective
326: Input Parameter:
327: . g - the global vector. The vector must have been obtained with either
328: VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
330: Output Parameter:
331: . l - the local (ghosted) representation
333: Notes:
334: This routine does not actually update the ghost values, but rather it
335: returns a sequential vector that includes the locations for the ghost
336: values and their current values. The returned vector and the original
337: vector passed in share the same array that contains the actual vector data.
339: One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
340: finished using the object.
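   A usage sketch for accessing owned and ghost entries through the local form
   (the vector x is assumed to have been created with VecCreateGhost(); the names
   are illustrative):
.vb
      Vec         lx;
      PetscScalar *a;
      int         nlocal,nghosted;

      VecGhostGetLocalForm(x,&lx);
      VecGetLocalSize(x,&nlocal);
      VecGetSize(lx,&nghosted);
      VecGetArray(lx,&a);
         ... a[0] through a[nlocal-1] are the locally owned values ...
         ... a[nlocal] through a[nghosted-1] are the ghost values ...
      VecRestoreArray(lx,&a);
      VecGhostRestoreLocalForm(x,&lx);
.ve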
342: Level: advanced
344: Concepts: vectors^ghost point access
346: .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
348: @*/
349: int VecGhostGetLocalForm(Vec g,Vec *l)
350: {
351: int ierr;
352: PetscTruth isseq,ismpi;
358: PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);
359: PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);
360: if (ismpi) {
361: Vec_MPI *v = (Vec_MPI*)g->data;
362: if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
363: *l = v->localrep;
364: } else if (isseq) {
365: *l = g;
366: } else {
367: SETERRQ1(1,"Vector type %s does not have local representation",g->type_name);
368: }
369: PetscObjectReference((PetscObject)*l);
370: return(0);
371: }
375: /*@C
376: VecGhostRestoreLocalForm - Restores the local ghosted representation of
377: a parallel vector obtained with VecGhostGetLocalForm().
379: Not Collective
381: Input Parameters:
382: + g - the global vector
383: - l - the local (ghosted) representation
385: Notes:
386: This routine does not update the ghost values; it simply releases the local
387: form obtained with VecGhostGetLocalForm(). Use VecGhostUpdateBegin() and
388: VecGhostUpdateEnd() to update the ghost values.
390: Level: advanced
392: .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
393: @*/
394: int VecGhostRestoreLocalForm(Vec g,Vec *l)
395: {
397: PetscObjectDereference((PetscObject)*l);
398: return(0);
399: }
403: /*@
404: VecGhostUpdateBegin - Begins the vector scatter to update the vector from
405: the local (ghosted) representation to the global representation, or vice versa.
407: Collective on Vec
409: Input Parameters:
410: + g - the vector (obtained with VecCreateGhost() or VecDuplicate())
411: . insertmode - one of ADD_VALUES or INSERT_VALUES
412: - scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
414: Notes:
415: Use the following to update the ghost regions with correct values from the owning process
416: .vb
417: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
418: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
419: .ve
421: Use the following to accumulate the ghost region values onto the owning processors
422: .vb
423: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
424: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
425: .ve
427: To accumulate the ghost region values onto the owning processors and then update
428: the ghost regions correctly, call the latter followed by the former, i.e.,
429: .vb
430: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
431: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
432: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
433: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
434: .ve
436: Level: advanced
438: .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
439: VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
441: @*/
442: int VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
443: {
444: Vec_MPI *v;
445: int ierr;
450: v = (Vec_MPI*)g->data;
451: if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
452: if (!v->localupdate) return(0);
453:
454: if (scattermode == SCATTER_REVERSE) {
455: VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);
456: } else {
457: VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);
458: }
459: return(0);
460: }
464: /*@
465: VecGhostUpdateEnd - Ends the vector scatter to update the vector from
466: the local (ghosted) representation to the global representation, or vice versa.
468: Collective on Vec
470: Input Parameters:
471: + g - the vector (obtained with VecCreateGhost() or VecDuplicate())
472: . insertmode - one of ADD_VALUES or INSERT_VALUES
473: - scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
475: Notes:
477: Use the following to update the ghost regions with correct values from the owning process
478: .vb
479: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
480: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
481: .ve
483: Use the following to accumulate the ghost region values onto the owning processors
484: .vb
485: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
486: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
487: .ve
489: To accumulate the ghost region values onto the owning processors and then update
490: the ghost regions correctly, call the latter followed by the former, i.e.,
491: .vb
492: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
493: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
494: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
495: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
496: .ve
498: Level: advanced
500: .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
501: VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
503: @*/
504: int VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
505: {
506: Vec_MPI *v;
507: int ierr;
512: v = (Vec_MPI*)g->data;
513: if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
514: if (!v->localupdate) return(0);
516: if (scattermode == SCATTER_REVERSE) {
517: VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);
518: } else {
519: VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);
520: }
521: return(0);
522: }
526: /*@C
527: VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
528: the caller allocates the array space.
530: Collective on MPI_Comm
532: Input Parameters:
533: + comm - the MPI communicator to use
534: . n - local vector length
535: . N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
536: . nghost - number of local ghost points
537: . ghosts - global indices of ghost points (or PETSC_NULL if not needed)
538: - array - the space to store the vector values (of length at least n+nghost)
540: Output Parameter:
541: . vv - the global vector representation (without ghost points as part of vector)
542:
543: Notes:
544: Use VecGhostGetLocalForm() to access the local, ghosted representation
545: of the vector.
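   A creation sketch (the sizes and ghost indices are illustrative; the buffer
   must hold at least n+nghost entries):
.vb
      int         n = 4,nghost = 2;
      int         ghosts[2];
      PetscScalar *buffer;
      Vec         x;

      ... fill ghosts[] with global indices owned by other processes ...
      PetscMalloc((n+nghost)*sizeof(PetscScalar),&buffer);
      VecCreateGhostWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,buffer,&x);
.ve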
547: Level: advanced
549: Concepts: vectors^creating with array
550: Concepts: vectors^ghosted
552: .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
553: VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
554: VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
556: @*/
557: int VecCreateGhostWithArray(MPI_Comm comm,int n,int N,int nghost,const int ghosts[],const PetscScalar array[],Vec *vv)
558: {
559: int ierr;
560: Vec_MPI *w;
561: PetscScalar *larray;
562: IS from,to;
565: *vv = 0;
567: if (n == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
568: if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
569: if (nghost < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
570: PetscSplitOwnership(comm,&n,&N);
571: /* Create global representation */
572: VecCreate(comm,vv);
573: VecSetSizes(*vv,n,N);
574: VecCreate_MPI_Private(*vv,nghost,array,PETSC_NULL);
575: w = (Vec_MPI *)(*vv)->data;
576: /* Create local representation */
577: VecGetArray(*vv,&larray);
578: VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);
579: PetscLogObjectParent(*vv,w->localrep);
580: VecRestoreArray(*vv,&larray);
582: /*
583: Create scatter context for scattering (updating) ghost values
584: */
585: ISCreateGeneral(comm,nghost,ghosts,&from);
586: ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
587: VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
588: PetscLogObjectParent(*vv,w->localupdate);
589: ISDestroy(to);
590: ISDestroy(from);
592: return(0);
593: }
597: /*@C
598: VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
600: Collective on MPI_Comm
602: Input Parameters:
603: + comm - the MPI communicator to use
604: . n - local vector length
605: . N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
606: . nghost - number of local ghost points
607: - ghosts - global indices of ghost points
609: Output Parameter:
610: . vv - the global vector representation (without ghost points as part of vector)
611:
612: Notes:
613: Use VecGhostGetLocalForm() to access the local, ghosted representation
614: of the vector.
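   A typical usage sketch (the sizes and ghost indices are illustrative):
.vb
      int  n = 4,nghost = 2;
      int  ghosts[2];
      Vec  x;

      ... fill ghosts[] with global indices owned by other processes ...
      VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,&x);
      ... set the locally owned entries and assemble the vector ...
      VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);
      VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);
      ... read owned and ghost entries through VecGhostGetLocalForm() ...
.ve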
616: Level: advanced
618: Concepts: vectors^ghosted
620: .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
621: VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
622: VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
623: VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
625: @*/
626: int VecCreateGhost(MPI_Comm comm,int n,int N,int nghost,const int ghosts[],Vec *vv)
627: {
631: VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);
632: return(0);
633: }
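/*
   VecDuplicate_MPI - Creates a new parallel vector with the same layout as win, including
   any ghosted local representation and its update scatter, but does not copy the values.
*/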
637: int VecDuplicate_MPI(Vec win,Vec *v)
638: {
639: int ierr;
640: Vec_MPI *vw,*w = (Vec_MPI *)win->data;
641: PetscScalar *array;
642: #if defined(PETSC_HAVE_AMS)
643: int (*f)(AMS_Memory,char *,Vec);
644: #endif
647: VecCreate(win->comm,v);
648: VecSetSizes(*v,win->n,win->N);
649: VecCreate_MPI_Private(*v,w->nghost,0,win->map);
650: vw = (Vec_MPI *)(*v)->data;
651: PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));
653: /* save local representation of the parallel vector (and scatter) if it exists */
654: if (w->localrep) {
655: VecGetArray(*v,&array);
656: VecCreateSeqWithArray(PETSC_COMM_SELF,win->n+w->nghost,array,&vw->localrep);
657: PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));
658: VecRestoreArray(*v,&array);
659: PetscLogObjectParent(*v,vw->localrep);
660: vw->localupdate = w->localupdate;
661: if (vw->localupdate) {
662: PetscObjectReference((PetscObject)vw->localupdate);
663: }
664: }
666: /* New vector should inherit stashing property of parent */
667: (*v)->stash.donotstash = win->stash.donotstash;
668:
669: PetscOListDuplicate(win->olist,&(*v)->olist);
670: PetscFListDuplicate(win->qlist,&(*v)->qlist);
671: if (win->mapping) {
672: (*v)->mapping = win->mapping;
673: PetscObjectReference((PetscObject)win->mapping);
674: }
675: if (win->bmapping) {
676: (*v)->bmapping = win->bmapping;
677: PetscObjectReference((PetscObject)win->bmapping);
678: }
679: (*v)->bs = win->bs;
680: (*v)->bstash.bs = win->bstash.bs;
682: #if defined(PETSC_HAVE_AMS)
683: /*
684: If the vector knows its "layout", let it set it; otherwise it defaults
685: to the correct 1d distribution
686: */
687: PetscObjectQueryFunction((PetscObject)(*v),"AMSSetFieldBlock_C",(void (**)(void))&f);
688: if (f) {
689: (*f)((AMS_Memory)(*v)->amem,"values",*v);
690: }
691: #endif
692: return(0);
693: }
695: /* ------------------------------------------------------------------------------------------*/
698: /*@C
699: VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
700: the caller allocates the array space. Indices in the ghost region are based on blocks.
702: Collective on MPI_Comm
704: Input Parameters:
705: + comm - the MPI communicator to use
706: . bs - block size
707: . n - local vector length
708: . N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
709: . nghost - number of local ghost blocks
710: . ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
711: - array - the space to store the vector values (of length at least n+nghost*bs)
713: Output Parameter:
714: . vv - the global vector representation (without ghost points as part of vector)
715:
716: Notes:
717: Use VecGhostGetLocalForm() to access the local, ghosted representation
718: of the vector.
720: n is the local vector size (the total local size, not the number of blocks), while nghost
721: is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
722: portion is bs*nghost
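   A creation sketch (block size, counts, and buffer are illustrative; the buffer
   must hold at least n+bs*nghost entries and n should be a multiple of bs):
.vb
      int         bs = 3,n = 12,nghost = 2;
      int         ghosts[2];
      PetscScalar *buffer;
      Vec         x;

      ... fill ghosts[] with global block indices owned by other processes ...
      PetscMalloc((n+bs*nghost)*sizeof(PetscScalar),&buffer);
      VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghosts,buffer,&x);
.ve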
724: Level: advanced
726: Concepts: vectors^creating ghosted
727: Concepts: vectors^creating with array
729: .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
730: VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
731: VecCreateGhostWithArray(), VecCreateGhostBlock()
733: @*/
734: int VecCreateGhostBlockWithArray(MPI_Comm comm,int bs,int n,int N,int nghost,const int ghosts[],const PetscScalar array[],Vec *vv)
735: {
736: int ierr;
737: Vec_MPI *w;
738: PetscScalar *larray;
739: IS from,to;
742: *vv = 0;
744: if (n == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
745: if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
746: if (nghost < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
747: PetscSplitOwnership(comm,&n,&N);
748: /* Create global representation */
749: VecCreate(comm,vv);
750: VecSetSizes(*vv,n,N);
751: VecCreate_MPI_Private(*vv,nghost*bs,array,PETSC_NULL);
752: VecSetBlockSize(*vv,bs);
753: w = (Vec_MPI *)(*vv)->data;
754: /* Create local representation */
755: VecGetArray(*vv,&larray);
756: VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);
757: VecSetBlockSize(w->localrep,bs);
758: PetscLogObjectParent(*vv,w->localrep);
759: VecRestoreArray(*vv,&larray);
761: /*
762: Create scatter context for scattering (updating) ghost values
763: */
764: ISCreateBlock(comm,bs,nghost,ghosts,&from);
765: ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);
766: VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
767: PetscLogObjectParent(*vv,w->localupdate);
768: ISDestroy(to);
769: ISDestroy(from);
771: return(0);
772: }
776: /*@C
777: VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
778: The indexing of the ghost points is done by blocks.
780: Collective on MPI_Comm
782: Input Parameters:
783: + comm - the MPI communicator to use
784: . bs - the block size
785: . n - local vector length
786: . N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
787: . nghost - number of local ghost blocks
788: - ghosts - global indices of ghost blocks
790: Output Parameter:
791: . vv - the global vector representation (without ghost points as part of vector)
792:
793: Notes:
794: Use VecGhostGetLocalForm() to access the local, ghosted representation
795: of the vector.
797: n is the local vector size (the total local size, not the number of blocks), while nghost
798: is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
799: portion is bs*nghost
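   A creation sketch (block size and counts are illustrative; n is the total number
   of local entries and should be a multiple of bs):
.vb
      int  bs = 3,n = 12,nghost = 2;
      int  ghosts[2];
      Vec  x;

      ... fill ghosts[] with global block indices owned by other processes ...
      VecCreateGhostBlock(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghosts,&x);
.ve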
801: Level: advanced
803: Concepts: vectors^ghosted
805: .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
806: VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
807: VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
809: @*/
810: int VecCreateGhostBlock(MPI_Comm comm,int bs,int n,int N,int nghost,const int ghosts[],Vec *vv)
811: {
815: VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);
816: return(0);
817: }
819: /*
820: The following routines implement a ghosted vector whose ghosting is determined by the call to
821: VecSetLocalToGlobalMapping()
822: */
826: int VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
827: {
828: int ierr;
829: Vec_MPI *v = (Vec_MPI *)vv->data;
832: v->nghost = map->n - vv->n;
834: /* we need to enlarge the array space that was allocated when the vector was created */
835: PetscFree(v->array_allocated);
836: PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);
837: v->array = v->array_allocated;
838:
839: /* Create local representation */
840: VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);
841: PetscLogObjectParent(vv,v->localrep);
843: return(0);
844: }
849: int VecSetValuesLocal_FETI(Vec vv,int n,const int *ix,const PetscScalar *values,InsertMode mode)
850: {
851: int ierr;
852: Vec_MPI *v = (Vec_MPI *)vv->data;
855: VecSetValues(v->localrep,n,ix,values,mode);
856: return(0);
857: }
859: EXTERN_C_BEGIN
862: int VecCreate_FETI(Vec vv)
863: {
867: VecSetType(vv,VECMPI);
868:
869: /* overwrite the functions to handle setting values locally */
870: vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
871: vv->ops->setvalueslocal = VecSetValuesLocal_FETI;
872: vv->ops->assemblybegin = 0;
873: vv->ops->assemblyend = 0;
874: vv->ops->setvaluesblocked = 0;
877: return(0);
878: }
879: EXTERN_C_END