Actual source code: pbvec.c
1: /*$Id: pbvec.c,v 1.173 2001/09/12 03:26:59 bsmith Exp $*/
3: /*
4: This file contains routines for Parallel vector operations.
5: */
6: #include "src/vec/impls/mpi/pvecimpl.h"
8: /*
9: Note this code is very similar to VecPublish_Seq()
10: */
11: static int VecPublish_MPI(PetscObject obj)
12: {
13: #if defined(PETSC_HAVE_AMS)
14: Vec v = (Vec) obj;
15: Vec_MPI *s = (Vec_MPI*)v->data;
16: int ierr,(*f)(AMS_Memory,char *,Vec);
17: #endif
20: #if defined(PETSC_HAVE_AMS)
21: /* if it is already published then return */
22: if (v->amem >=0) return(0);
24: PetscObjectPublishBaseBegin(obj);
25: AMS_Memory_add_field((AMS_Memory)v->amem,"values",s->array,v->n,AMS_DOUBLE,AMS_READ,
26: AMS_DISTRIBUTED,AMS_REDUCT_UNDEF);
28: /*
29: If the vector knows its "layout" let it set it, otherwise it defaults
30: to the correct 1d distribution
31: */
32: PetscObjectQueryFunction(obj,"AMSSetFieldBlock_C",(void (**)(void))&f);
33: if (f) {
34: (*f)((AMS_Memory)v->amem,"values",v);
35: }
36: PetscObjectPublishBaseEnd(obj);
37: #endif
38: return(0);
39: }
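/*
   The parallel dot products below follow a common pattern: compute the local
   contribution with the sequential kernel (VecDot_Seq()/VecTDot_Seq()) and then
   combine the per-process partial sums with MPI_Allreduce() over the vector's
   communicator.
*/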
41: int VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
42: {
43: PetscScalar sum,work;
44: int ierr;
47: VecDot_Seq(xin,yin,&work);
48: MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
49: *z = sum;
50: return(0);
51: }
53: int VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
54: {
55: PetscScalar sum,work;
56: int ierr;
59: VecTDot_Seq(xin,yin,&work);
60: MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,PetscSum_Op,xin->comm);
61: *z = sum;
62: return(0);
63: }
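/*
   A minimal usage sketch for the option handled below (the vector x, the locally
   owned index row, and the value val are assumed; error checking omitted).  With
   VEC_IGNORE_OFF_PROC_ENTRIES set, values destined for another process are
   dropped rather than stashed, so the assembly requires no communication:

      VecSetOption(x,VEC_IGNORE_OFF_PROC_ENTRIES);
      VecSetValues(x,1,&row,&val,INSERT_VALUES);
      VecAssemblyBegin(x);
      VecAssemblyEnd(x);
*/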
65: int VecSetOption_MPI(Vec v,VecOption op)
66: {
68: if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
69: v->stash.donotstash = PETSC_TRUE;
70: } else if (op == VEC_TREAT_OFF_PROC_ENTRIES) {
71: v->stash.donotstash = PETSC_FALSE;
72: }
73: return(0);
74: }
75:
76: EXTERN int VecDuplicate_MPI(Vec,Vec *);
77: EXTERN_C_BEGIN
78: EXTERN int VecView_MPI_Draw(Vec,PetscViewer);
79: EXTERN_C_END
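/*
   Function table for the parallel (MPI) vector.  Operations that touch only the
   local array (scale, copy, set, axpy, ...) reuse the sequential kernels, while
   operations that need data from other processes (dot products, norms, max/min,
   setting values, assembly, view) get MPI-specific implementations.
*/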
81: static struct _VecOps DvOps = { VecDuplicate_MPI,
82: VecDuplicateVecs_Default,
83: VecDestroyVecs_Default,
84: VecDot_MPI,
85: VecMDot_MPI,
86: VecNorm_MPI,
87: VecTDot_MPI,
88: VecMTDot_MPI,
89: VecScale_Seq,
90: VecCopy_Seq,
91: VecSet_Seq,
92: VecSwap_Seq,
93: VecAXPY_Seq,
94: VecAXPBY_Seq,
95: VecMAXPY_Seq,
96: VecAYPX_Seq,
97: VecWAXPY_Seq,
98: VecPointwiseMult_Seq,
99: VecPointwiseDivide_Seq,
100: VecSetValues_MPI,
101: VecAssemblyBegin_MPI,
102: VecAssemblyEnd_MPI,
103: VecGetArray_Seq,
104: VecGetSize_MPI,
105: VecGetSize_Seq,
106: VecRestoreArray_Seq,
107: VecMax_MPI,
108: VecMin_MPI,
109: VecSetRandom_Seq,
110: VecSetOption_MPI,
111: VecSetValuesBlocked_MPI,
112: VecDestroy_MPI,
113: VecView_MPI,
114: VecPlaceArray_Seq,
115: VecReplaceArray_Seq,
116: VecDot_Seq,
117: VecTDot_Seq,
118: VecNorm_Seq,
119: VecLoadIntoVector_Default,
120: VecReciprocal_Default,
121: 0, /* VecViewNative... */
122: VecConjugate_Seq,
123: 0,
124: 0,
125: VecResetArray_Seq};
127: /*
128: VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
129: VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
131: VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
131: */
132: int VecCreate_MPI_Private(Vec v,int nghost,const PetscScalar array[],PetscMap map)
133: {
134: Vec_MPI *s;
135: int ierr,size,rank;
138: MPI_Comm_size(v->comm,&size);
139: MPI_Comm_rank(v->comm,&rank);
141: v->bops->publish = VecPublish_MPI;
142: PetscLogObjectMemory(v,sizeof(Vec_MPI) + (v->n+nghost+1)*sizeof(PetscScalar));
143: ierr = PetscNew(Vec_MPI,&s);
144: ierr = PetscMemzero(s,sizeof(Vec_MPI));
145: ierr = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
146: v->data = (void*)s;
147: s->nghost = nghost;
148: v->mapping = 0;
149: v->bmapping = 0;
150: v->petscnative = PETSC_TRUE;
152: if (array) {
153: s->array = (PetscScalar *)array;
154: s->array_allocated = 0;
155: } else {
156: ierr = PetscMalloc((v->n+nghost+1)*sizeof(PetscScalar),&s->array);
157: s->array_allocated = s->array;
158: ierr = PetscMemzero(s->array,v->n*sizeof(PetscScalar));
159: }
161: /* By default parallel vectors do not have local representation */
162: s->localrep = 0;
163: s->localupdate = 0;
165: v->stash.insertmode = NOT_SET_VALUES;
167: if (!v->map) {
168: if (!map) {
169: PetscMapCreateMPI(v->comm,v->n,v->N,&v->map);
170: } else {
171: v->map = map;
172: PetscObjectReference((PetscObject)map);
173: }
174: }
175: /* create the stashes. The block-size for bstash is set later when
176: VecSetValuesBlocked is called.
177: */
178: VecStashCreate_Private(v->comm,1,&v->stash);
179: VecStashCreate_Private(v->comm,v->bs,&v->bstash);
180:
181: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE)
182: PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);
183: PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);
184: #endif
185: PetscObjectChangeTypeName((PetscObject)v,VECMPI);
186: PetscPublishAll(v);
187: return(0);
188: }
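/*
   A minimal sketch (error checking omitted) of how VecCreate_MPI() below is
   normally reached from user code; the global length 100 is just an assumption:

      Vec x;

      VecCreate(PETSC_COMM_WORLD,&x);
      VecSetSizes(x,PETSC_DECIDE,100);
      VecSetType(x,VECMPI);
      ...
      VecDestroy(x);
*/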
190: EXTERN_C_BEGIN
191: int VecCreate_MPI(Vec vv)
192: {
196: if (vv->bs > 0) {
197: PetscSplitOwnershipBlock(vv->comm,vv->bs,&vv->n,&vv->N);
198: } else {
199: PetscSplitOwnership(vv->comm,&vv->n,&vv->N);
200: }
201: VecCreate_MPI_Private(vv,0,0,PETSC_NULL);
202: VecSetSerializeType(vv,VEC_SER_MPI_BINARY);
203: return(0);
204: }
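/*
   VecSerialize_MPI() below stores or loads a vector through a binary viewer.
   Each record holds the global size N, the local size n, the number of ghost
   values, and then the n+nghost local values; on loading, the local sizes are
   summed over the communicator and checked against the stored global size.
*/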
206: int VecSerialize_MPI(MPI_Comm comm, Vec *vec, PetscViewer viewer, PetscTruth store)
207: {
208: Vec v;
209: Vec_MPI *x;
210: PetscScalar *array;
211: int fd;
212: int vars, locVars, ghostVars;
213: int size;
214: int ierr;
217: PetscViewerBinaryGetDescriptor(viewer, &fd);
218: if (store) {
219: v = *vec;
220: x = (Vec_MPI *) v->data;
221: PetscBinaryWrite(fd, &v->N, 1, PETSC_INT, 0);
222: PetscBinaryWrite(fd, &v->n, 1, PETSC_INT, 0);
223: PetscBinaryWrite(fd, &x->nghost, 1, PETSC_INT, 0);
224: PetscBinaryWrite(fd, x->array, v->n + x->nghost, PETSC_SCALAR, 0);
225: } else {
226: PetscBinaryRead(fd, &vars, 1, PETSC_INT);
227: PetscBinaryRead(fd, &locVars, 1, PETSC_INT);
228: PetscBinaryRead(fd, &ghostVars, 1, PETSC_INT);
229: MPI_Allreduce(&locVars, &size, 1, MPI_INT, MPI_SUM, comm);
230: if (size != vars) SETERRQ(PETSC_ERR_ARG_CORRUPT, "Invalid row partition");
231: VecCreate(comm, &v);
232: VecSetSizes(v, locVars, vars);
233: if (locVars + ghostVars > 0) {
234: PetscMalloc((locVars + ghostVars) * sizeof(PetscScalar), &array);
235: PetscBinaryRead(fd, array, locVars + ghostVars, PETSC_SCALAR);
236: VecCreate_MPI_Private(v, ghostVars, array, PETSC_NULL);
237: ((Vec_MPI *) v->data)->array_allocated = array;
238: } else {
239: VecCreate_MPI_Private(v, ghostVars, PETSC_NULL, PETSC_NULL);
240: }
242: VecAssemblyBegin(v);
243: VecAssemblyEnd(v);
244: *vec = v;
245: }
247: return(0);
248: }
249: EXTERN_C_END
251: /*@C
252: VecCreateMPIWithArray - Creates a parallel, array-style vector,
253: where the user provides the array space to store the vector values.
255: Collective on MPI_Comm
257: Input Parameters:
258: + comm - the MPI communicator to use
259: . n - local vector length, cannot be PETSC_DECIDE
260: . N - global vector length (or PETSC_DECIDE to have it calculated)
261: - array - the user provided array to store the vector values
263: Output Parameter:
264: . vv - the vector
265:
266: Notes:
267: Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
268: same type as an existing vector.
270: If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
271: at a later stage to SET the array for storing the vector values.
273: PETSc does NOT free the array when the vector is destroyed via VecDestroy().
274: The user should not free the array until the vector is destroyed.
276: Level: intermediate
278: Concepts: vectors^creating with array
280: .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
281: VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
283: @*/
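/*
   A minimal usage sketch (error checking omitted); the local length 5 and the
   statically sized array are assumptions for illustration.  Note that the array
   remains owned by the caller and is not freed by VecDestroy():

      PetscScalar a[5];
      Vec         x;

      VecCreateMPIWithArray(PETSC_COMM_WORLD,5,PETSC_DECIDE,a,&x);
      ...
      VecDestroy(x);
*/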
284: int VecCreateMPIWithArray(MPI_Comm comm,int n,int N,const PetscScalar array[],Vec *vv)
285: {
289: if (n == PETSC_DECIDE) {
290: SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
291: }
292: PetscSplitOwnership(comm,&n,&N);
293: VecCreate(comm,vv);
294: VecSetSizes(*vv,n,N);
295: VecCreate_MPI_Private(*vv,0,array,PETSC_NULL);
296: return(0);
297: }
299: /*@C
300: VecGhostGetLocalForm - Obtains the local ghosted representation of
301: a parallel vector created with VecCreateGhost().
303: Not Collective
305: Input Parameter:
306: . g - the global vector. Vector must have been obtained with either
307: VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
309: Output Parameter:
310: . l - the local (ghosted) representation
312: Notes:
313: This routine does not actually update the ghost values, but rather it
314: returns a sequential vector that includes the locations for the ghost
315: values and their current values. The returned vector and the original
316: vector passed in share the same array that contains the actual vector data.
318: One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
319: finished using the object.
321: Level: advanced
323: Concepts: vectors^ghost point access
325: .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
327: @*/
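/*
   A minimal usage sketch (error checking omitted), assuming x was created with
   VecCreateGhost().  The local form exposes the owned entries followed by the
   ghost entries in one contiguous array (a[0..n-1] are the owned values,
   a[n..n+nghost-1] the ghost values):

      Vec         xlocal;
      PetscScalar *a;

      VecGhostGetLocalForm(x,&xlocal);
      VecGetArray(xlocal,&a);
      ...
      VecRestoreArray(xlocal,&a);
      VecGhostRestoreLocalForm(x,&xlocal);
*/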
328: int VecGhostGetLocalForm(Vec g,Vec *l)
329: {
330: int ierr;
331: PetscTruth isseq,ismpi;
337: PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);
338: PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);
339: if (ismpi) {
340: Vec_MPI *v = (Vec_MPI*)g->data;
341: if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
342: *l = v->localrep;
343: } else if (isseq) {
344: *l = g;
345: } else {
346: SETERRQ1(1,"Vector type %s does not have local representation",g->type_name);
347: }
348: PetscObjectReference((PetscObject)*l);
349: return(0);
350: }
352: /*@C
353: VecGhostRestoreLocalForm - Restores the local ghosted representation of
354: a parallel vector obtained with VecGhostGetLocalForm().
356: Not Collective
358: Input Parameter:
359: + g - the global vector
360: - l - the local (ghosted) representation
362: Notes:
363: This routine does not update the ghost values; it simply releases the local
364: (ghosted) representation obtained with VecGhostGetLocalForm().
367: Level: advanced
369: .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
370: @*/
371: int VecGhostRestoreLocalForm(Vec g,Vec *l)
372: {
374: PetscObjectDereference((PetscObject)*l);
375: return(0);
376: }
378: /*@
379: VecGhostUpdateBegin - Begins the vector scatter to update the vector from
380: local representation to global or global representation to local.
382: Collective on Vec
384: Input Parameters:
385: + g - the vector (obtained with VecCreateGhost() or VecDuplicate())
386: . insertmode - one of ADD_VALUES or INSERT_VALUES
387: - scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
389: Notes:
390: Use the following to update the ghost regions with correct values from the owning process
391: .vb
392: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
393: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
394: .ve
396: Use the following to accumulate the ghost region values onto the owning processors
397: .vb
398: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
399: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
400: .ve
402: To accumulate the ghost region values onto the owning processors and then update
403: the ghost regions correctly, call the latter followed by the former, i.e.,
404: .vb
405: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
406: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
407: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
408: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
409: .ve
411: Level: advanced
413: .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
414: VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
416: @*/
417: int VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
418: {
419: Vec_MPI *v;
420: int ierr;
425: v = (Vec_MPI*)g->data;
426: if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
427: if (!v->localupdate) return(0);
428:
429: if (scattermode == SCATTER_REVERSE) {
430: VecScatterBegin(v->localrep,g,insertmode,scattermode,v->localupdate);
431: } else {
432: VecScatterBegin(g,v->localrep,insertmode,scattermode,v->localupdate);
433: }
434: return(0);
435: }
437: /*@
438: VecGhostUpdateEnd - End the vector scatter to update the vector from
439: local representation to global or global representation to local.
441: Collective on Vec
443: Input Parameters:
444: + g - the vector (obtained with VecCreateGhost() or VecDuplicate())
445: . insertmode - one of ADD_VALUES or INSERT_VALUES
446: - scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
448: Notes:
450: Use the following to update the ghost regions with correct values from the owning process
451: .vb
452: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
453: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
454: .ve
456: Use the following to accumulate the ghost region values onto the owning processors
457: .vb
458: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
459: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
460: .ve
462: To accumulate the ghost region values onto the owning processors and then update
463: the ghost regions correctly, call the latter followed by the former, i.e.,
464: .vb
465: VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
466: VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
467: VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
468: VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
469: .ve
471: Level: advanced
473: .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
474: VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
476: @*/
477: int VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
478: {
479: Vec_MPI *v;
480: int ierr;
485: v = (Vec_MPI*)g->data;
486: if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
487: if (!v->localupdate) return(0);
489: if (scattermode == SCATTER_REVERSE) {
490: VecScatterEnd(v->localrep,g,insertmode,scattermode,v->localupdate);
491: } else {
492: VecScatterEnd(g,v->localrep,insertmode,scattermode,v->localupdate);
493: }
494: return(0);
495: }
497: /*@C
498: VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
499: the caller allocates the array space.
501: Collective on MPI_Comm
503: Input Parameters:
504: + comm - the MPI communicator to use
505: . n - local vector length
506: . N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
507: . nghost - number of local ghost points
508: . ghosts - global indices of ghost points (or PETSC_NULL if not needed)
509: - array - the space to store the vector values (must be of length at least n + nghost)
511: Output Parameter:
512: . vv - the global vector representation (without ghost points as part of vector)
513:
514: Notes:
515: Use VecGhostGetLocalForm() to access the local, ghosted representation
516: of the vector.
518: Level: advanced
520: Concepts: vectors^creating with array
521: Concepts: vectors^ghosted
523: .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
524: VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray()
526: @*/
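/*
   A minimal usage sketch (error checking omitted); the sizes and ghost indices
   are assumptions for illustration.  The caller-supplied array must provide room
   for the n owned values followed by the nghost ghost values, and it is not
   freed by VecDestroy():

      int         n = 4,nghost = 2,ghosts[2] = {0,5};
      PetscScalar *a;
      Vec         x;

      PetscMalloc((n+nghost)*sizeof(PetscScalar),&a);
      VecCreateGhostWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,a,&x);
      ...
      VecDestroy(x);
      PetscFree(a);
*/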
527: int VecCreateGhostWithArray(MPI_Comm comm,int n,int N,int nghost,const int ghosts[],const PetscScalar array[],Vec *vv)
528: {
529: int ierr;
530: Vec_MPI *w;
531: PetscScalar *larray;
534: *vv = 0;
536: if (n == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
537: if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
538: if (nghost < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
539: PetscSplitOwnership(comm,&n,&N);
540: /* Create global representation */
541: VecCreate(comm,vv);
542: VecSetSizes(*vv,n,N);
543: VecCreate_MPI_Private(*vv,nghost,array,PETSC_NULL);
544: w = (Vec_MPI *)(*vv)->data;
545: /* Create local representation */
546: VecGetArray(*vv,&larray);
547: VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);
548: PetscLogObjectParent(*vv,w->localrep);
549: VecRestoreArray(*vv,&larray);
551: /*
552: Create scatter context for scattering (updating) ghost values
553: */
554: if (ghosts) {
555: IS from,to;
556:
557: ISCreateGeneral(comm,nghost,ghosts,&from);
558: ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
559: VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
560: PetscLogObjectParent(*vv,w->localupdate);
561: ISDestroy(to);
562: ISDestroy(from);
563: }
565: return(0);
566: }
568: /*@C
569: VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
571: Collective on MPI_Comm
573: Input Parameters:
574: + comm - the MPI communicator to use
575: . n - local vector length
576: . N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
577: . nghost - number of local ghost points
578: - ghosts - global indices of ghost points
580: Output Parameter:
581: . vv - the global vector representation (without ghost points as part of vector)
582:
583: Notes:
584: Use VecGhostGetLocalForm() to access the local, ghosted representation
585: of the vector.
587: Level: advanced
589: Concepts: vectors^ghosted
591: .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
592: VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
593: VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd()
595: @*/
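/*
   A minimal usage sketch (error checking omitted); the sizes and ghost indices
   are assumptions for illustration:

      int n = 4,nghost = 2,ghosts[2] = {0,5};
      Vec x;

      VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,&x);
      ... set entries with VecSetValues(), then VecAssemblyBegin()/VecAssemblyEnd() ...
      VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);
      VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);
      ... the ghost values are now current and accessible via VecGhostGetLocalForm() ...
      VecDestroy(x);
*/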
596: int VecCreateGhost(MPI_Comm comm,int n,int N,int nghost,const int ghosts[],Vec *vv)
597: {
601: VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);
602: return(0);
603: }
605: int VecDuplicate_MPI(Vec win,Vec *v)
606: {
607: int ierr;
608: Vec_MPI *vw,*w = (Vec_MPI *)win->data;
609: PetscScalar *array;
610: #if defined(PETSC_HAVE_AMS)
611: int (*f)(AMS_Memory,char *,Vec);
612: #endif
615: VecCreate(win->comm,v);
616: VecSetSizes(*v,win->n,win->N);
617: VecCreate_MPI_Private(*v,w->nghost,0,win->map);
618: vw = (Vec_MPI *)(*v)->data;
619: PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));
621: /* save local representation of the parallel vector (and scatter) if it exists */
622: if (w->localrep) {
623: VecGetArray(*v,&array);
624: VecCreateSeqWithArray(PETSC_COMM_SELF,win->n+w->nghost,array,&vw->localrep);
625: PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));
626: VecRestoreArray(*v,&array);
627: PetscLogObjectParent(*v,vw->localrep);
628: vw->localupdate = w->localupdate;
629: if (vw->localupdate) {
630: PetscObjectReference((PetscObject)vw->localupdate);
631: }
632: }
634: /* New vector should inherit stashing property of parent */
635: (*v)->stash.donotstash = win->stash.donotstash;
636:
637: PetscOListDuplicate(win->olist,&(*v)->olist);
638: PetscFListDuplicate(win->qlist,&(*v)->qlist);
639: if (win->mapping) {
640: (*v)->mapping = win->mapping;
641: PetscObjectReference((PetscObject)win->mapping);
642: }
643: if (win->bmapping) {
644: (*v)->bmapping = win->bmapping;
645: PetscObjectReference((PetscObject)win->bmapping);
646: }
647: (*v)->bs = win->bs;
648: (*v)->bstash.bs = win->bstash.bs;
650: #if defined(PETSC_HAVE_AMS)
651: /*
652: If the vector knows its "layout" let it set it, otherwise it defaults
653: to the correct 1d distribution
654: */
655: PetscObjectQueryFunction((PetscObject)(*v),"AMSSetFieldBlock_C",(void (**)(void))&f);
656: if (f) {
657: (*f)((AMS_Memory)(*v)->amem,"values",*v);
658: }
659: #endif
660: return(0);
661: }
663: /* ------------------------------------------------------------------------------------------*/
664: /*@C
665: VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
666: the caller allocates the array space. Indices in the ghost region are based on blocks.
668: Collective on MPI_Comm
670: Input Parameters:
671: + comm - the MPI communicator to use
672: . bs - block size
673: . n - local vector length
674: . N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
675: . nghost - number of local ghost blocks
676: . ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
677: - array - the space to store the vector values (must be of length at least n + nghost*bs)
679: Output Parameter:
680: . vv - the global vector representation (without ghost points as part of vector)
681:
682: Notes:
683: Use VecGhostGetLocalForm() to access the local, ghosted representation
684: of the vector.
686: n is the local vector size (the total local size, not the number of blocks) while nghost
687: is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
688: portion is bs*nghost
690: Level: advanced
692: Concepts: vectors^creating ghosted
693: Concepts: vectors^creating with array
695: .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
696: VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
697: VecCreateGhostWithArray(), VecCreateGhostBlocked()
699: @*/
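/*
   A small sizing example for the routine below (numbers are assumptions): with
   bs = 3, local length n = 12 (4 blocks) and nghost = 2 ghost blocks, the
   caller's array must hold n + bs*nghost = 12 + 3*2 = 18 PetscScalar values.
*/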
700: int VecCreateGhostBlockWithArray(MPI_Comm comm,int bs,int n,int N,int nghost,const int ghosts[],const PetscScalar array[],Vec *vv)
701: {
702: int ierr;
703: Vec_MPI *w;
704: PetscScalar *larray;
707: *vv = 0;
709: if (n == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
710: if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
711: if (nghost < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
712: PetscSplitOwnership(comm,&n,&N);
713: /* Create global representation */
714: VecCreate(comm,vv);
715: VecSetSizes(*vv,n,N);
716: VecCreate_MPI_Private(*vv,nghost*bs,array,PETSC_NULL);
717: VecSetBlockSize(*vv,bs);
718: w = (Vec_MPI *)(*vv)->data;
719: /* Create local representation */
720: VecGetArray(*vv,&larray);
721: VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);
722: VecSetBlockSize(w->localrep,bs);
723: PetscLogObjectParent(*vv,w->localrep);
724: VecRestoreArray(*vv,&larray);
726: /*
727: Create scatter context for scattering (updating) ghost values
728: */
729: if (ghosts) {
730: IS from,to;
731:
732: ISCreateBlock(comm,bs,nghost,ghosts,&from);
733: ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);
734: VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
735: PetscLogObjectParent(*vv,w->localupdate);
736: ISDestroy(to);
737: ISDestroy(from);
738: }
740: return(0);
741: }
743: /*@C
744: VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
745: The indexing of the ghost points is done with blocks.
747: Collective on MPI_Comm
749: Input Parameters:
750: + comm - the MPI communicator to use
751: . bs - the block size
752: . n - local vector length
753: . N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
754: . nghost - number of local ghost blocks
755: - ghosts - global indices of ghost blocks
757: Output Parameter:
758: . vv - the global vector representation (without ghost points as part of vector)
759:
760: Notes:
761: Use VecGhostGetLocalForm() to access the local, ghosted representation
762: of the vector.
764: n is the local vector size (the total local size, not the number of blocks) while nghost
765: is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
766: portion is bs*nghost
768: Level: advanced
770: Concepts: vectors^ghosted
772: .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
773: VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
774: VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
776: @*/
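/*
   A minimal usage sketch (error checking omitted); the block size, lengths and
   ghost block indices are assumptions for illustration.  Note that nghost and
   ghosts[] refer to blocks, not individual entries:

      int bs = 2,n = 8,nghost = 2,ghosts[2] = {0,5};
      Vec x;

      VecCreateGhostBlock(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghosts,&x);
      ... the local form has n + bs*nghost = 8 + 2*2 = 12 entries ...
      VecDestroy(x);
*/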
777: int VecCreateGhostBlock(MPI_Comm comm,int bs,int n,int N,int nghost,const int ghosts[],Vec *vv)
778: {
782: VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);
783: return(0);
784: }
786: /*
787: These introduce a ghosted vector where the ghosting is determined by the call to
788: VecSetLocalToGlobalMapping()
789: */
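/*
   Sketch of the flow implemented below: VecCreate_FETI() builds a standard VECMPI
   vector and then overrides two operations, so that VecSetLocalToGlobalMapping()
   enlarges the array to hold the local plus ghost entries and builds the local
   representation, and VecSetValuesLocal() inserts directly into that local
   representation.
*/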
791: int VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
792: {
793: int ierr;
794: Vec_MPI *v = (Vec_MPI *)vv->data;
797: v->nghost = map->n - vv->n;
799: /* we need to enlarge the array space that was allocated when the vector was created */
800: ierr = PetscFree(v->array_allocated);
801: ierr = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);
802: v->array = v->array_allocated;
803:
804: /* Create local representation */
805: VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);
806: PetscLogObjectParent(vv,v->localrep);
808: return(0);
809: }
812: int VecSetValuesLocal_FETI(Vec vv,int n,const int *ix,const PetscScalar *values,InsertMode mode)
813: {
814: int ierr;
815: Vec_MPI *v = (Vec_MPI *)vv->data;
818: VecSetValues(v->localrep,n,ix,values,mode);
819: return(0);
820: }
822: EXTERN_C_BEGIN
823: int VecCreate_FETI(Vec vv)
824: {
828: VecSetType(vv,VECMPI);
829:
830: /* overwrite the functions to handle setting values locally */
831: vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
832: vv->ops->setvalueslocal = VecSetValuesLocal_FETI;
833: vv->ops->assemblybegin = 0;
834: vv->ops->assemblyend = 0;
835: vv->ops->setvaluesblocked = 0;
838: return(0);
839: }
840: EXTERN_C_END