Actual source code: da1.c
/*$Id: da1.c,v 1.129 2001/09/07 20:12:17 bsmith Exp $*/

/*
   Code for manipulating distributed regular 1d arrays in parallel.
   This file was created by Peter Mell  6/30/95
*/

#include "src/dm/da/daimpl.h"

#if defined(PETSC_HAVE_AMS)
EXTERN_C_BEGIN
EXTERN int AMSSetFieldBlock_DA(AMS_Memory,char *,Vec);
EXTERN_C_END
#endif
int DAView_1d(DA da,PetscViewer viewer)
{
  int        rank,ierr;
  PetscTruth isascii,isdraw,isbinary;

  MPI_Comm_rank(da->comm,&rank);

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
  if (isascii) {
    PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %d m %d w %d s %d\n",rank,da->M,
                                       da->m,da->w,da->s);
    PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %d %d\n",da->xs,da->xe);
    PetscViewerFlush(viewer);
  } else if (isdraw) {
    PetscDraw  draw;
    double     ymin = -1,ymax = 1,xmin = -1,xmax = da->M,x;
    int        base;
    char       node[10];
    PetscTruth isnull;

    PetscViewerDrawGetDraw(viewer,0,&draw);
    PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

    PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
    PetscDrawSynchronizedClear(draw);

    /* first processor draws all node lines */
    if (!rank) {
      int xmin_tmp;
      ymin = 0.0; ymax = 0.3;

      /* ADIC doesn't like doubles in a for loop */
      for (xmin_tmp=0; xmin_tmp < (int)da->M; xmin_tmp++) {
        PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);
      }

      xmin = 0.0; xmax = da->M - 1;
      PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
      PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);
    }

    PetscDrawSynchronizedFlush(draw);
    PetscDrawPause(draw);

    /* draw my box */
    ymin = 0; ymax = 0.3; xmin = da->xs / da->w; xmax = (da->xe / da->w) - 1;
    PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);

    /* Put in index numbers */
    base = da->base / da->w;
    for (x=xmin; x<=xmax; x++) {
      sprintf(node,"%d",base++);
      PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);
    }

    PetscDrawSynchronizedFlush(draw);
    PetscDrawPause(draw);
  } else if (isbinary) {
    DAView_Binary(da,viewer);
  } else {
    SETERRQ1(1,"Viewer type %s not supported for DA 1d",((PetscObject)viewer)->type_name);
  }
  return(0);
}
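
/*
   Usage sketch (not part of this file): DAView_1d is normally reached
   through DAView() once the DA exists, either directly,

       DAView(da,PETSC_VIEWER_STDOUT_WORLD);    ASCII summary
       DAView(da,PETSC_VIEWER_DRAW_WORLD);      X-window picture

   or via the -da_view and -da_view_draw options handled at the end of
   DACreate1d() below.
*/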

EXTERN int DAPublish_Petsc(PetscObject);

/*@C
   DACreate1d - Creates an object that will manage the communication of one-dimensional
   regular array data that is distributed across some processors.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  wrap - type of periodicity the array should have, if any. Use
          either DA_NONPERIODIC or DA_XPERIODIC
.  M - global dimension of the array
.  dof - number of degrees of freedom per node
.  lc - array containing the number of nodes in the X direction on each processor,
        or PETSC_NULL. If non-null, it must be of length equal to the number of processors.
-  s - stencil width

   Output Parameter:
.  inra - the resulting distributed array object

   Options Database Key:
+  -da_view - Calls DAView() at the conclusion of DACreate1d()
.  -da_grid_x <nx> - number of grid points in x direction; can be set if M < 0
-  -da_noao - do not compute the natural-to-PETSc ordering object

   Level: beginner

   Notes:
   If you are having problems with running out of memory then run with the option -da_noao.

   The array data itself is NOT stored in the DA, it is stored in Vec objects;
   the appropriate vector objects can be obtained with calls to DACreateGlobalVector()
   and DACreateLocalVector() and calls to VecDuplicate() if more are needed.

.keywords: distributed array, create, one-dimensional

.seealso: DADestroy(), DAView(), DACreate2d(), DACreate3d(), DAGlobalToLocalBegin(),
          DAGlobalToLocalEnd(), DALocalToGlobal(), DALocalToLocalBegin(), DALocalToLocalEnd(),
          DAGetInfo(), DACreateGlobalVector(), DACreateLocalVector(), DACreateNaturalVector(), DALoad(), DAView()

@*/
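/*
   A minimal calling sketch (assumes the usual PETSc error checking via
   ierr/CHKERRQ; not part of this file):

       DA  da;
       Vec g,l;
       ierr = DACreate1d(PETSC_COMM_WORLD,DA_XPERIODIC,100,1,1,PETSC_NULL,&da);CHKERRQ(ierr);
       ierr = DACreateGlobalVector(da,&g);CHKERRQ(ierr);
       ierr = DACreateLocalVector(da,&l);CHKERRQ(ierr);
       ...
       ierr = VecDestroy(g);CHKERRQ(ierr);
       ierr = VecDestroy(l);CHKERRQ(ierr);
       ierr = DADestroy(da);CHKERRQ(ierr);
*/
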
int DACreate1d(MPI_Comm comm,DAPeriodicType wrap,int M,int dof,int s,int *lc,DA *inra)
{
  int        rank,size,xs,xe,x,Xs,Xe,ierr,start,end,m;
  int        i,*idx,nn,j,left,refine_x = 2,tM = M;
  PetscTruth flg1,flg2;
  DA         da;
  Vec        local,global;
  VecScatter ltog,gtol;
  IS         to,from;

  *inra = 0;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  DMInitializePackage(PETSC_NULL);
#endif

  if (dof < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %d",dof);
  if (s < 0)   SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %d",s);

  PetscOptionsBegin(comm,PETSC_NULL,"1d DA Options","DA");
    if (M < 0) {
      tM = -M;
      PetscOptionsInt("-da_grid_x","Number of grid points in x direction","DACreate1d",tM,&tM,PETSC_NULL);
    }
    PetscOptionsInt("-da_refine_x","Refinement ratio in x direction","DACreate1d",refine_x,&refine_x,PETSC_NULL);
  PetscOptionsEnd();
  M = tM;

  PetscHeaderCreate(da,_p_DA,struct _DAOps,DA_COOKIE,0,"DA",comm,DADestroy,DAView);
  PetscLogObjectCreate(da);
  da->bops->publish           = DAPublish_Petsc;
  da->ops->createglobalvector = DACreateGlobalVector;
  da->ops->getinterpolation   = DAGetInterpolation;
  da->ops->getcoloring        = DAGetColoring;
  da->ops->getmatrix          = DAGetMatrix;
  da->ops->refine             = DARefine;
  PetscLogObjectMemory(da,sizeof(struct _p_DA));
  da->dim        = 1;
  da->interptype = DA_Q1;
  da->refine_x   = refine_x;
  PetscMalloc(dof*sizeof(char*),&da->fieldname);
  PetscMemzero(da->fieldname,dof*sizeof(char*));
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);

  m = size;

  if (M < m)     SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"More processors than data points! %d %d",m,M);
  if ((M-1) < s) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %d %d",M-1,s);

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lc) {
    PetscOptionsHasName(PETSC_NULL,"-da_partition_blockcomm",&flg1);
    PetscOptionsHasName(PETSC_NULL,"-da_partition_nodes_at_end",&flg2);
    if (flg1) {        /* Block Comm type Distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* The odd nodes are evenly distributed across the last nodes */
      x = (M + rank)/m;
      if (M/m == x) { xs = rank*x; }
      else          { xs = rank*(x-1) + (M+rank)%(x*m); }
    } else {           /* The odd nodes are evenly distributed across the first k nodes */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) {xs = (rank * (int)(M/m) + M % m);}
      else                 {xs = rank * (int)(M/m) + rank;}
    }
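
    /* For example, the regular distribution above with M = 10 nodes on
       m = 3 processes gives x = 4,3,3 and xs = 0,4,7: each rank gets
       M/m = 3 nodes and the M % m = 1 leftover node goes to rank 0. */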
  } else {
    x  = lc[rank];
    xs = 0;
    for (i=0; i<rank; i++) {
      xs += lc[i];
    }
    /* verify that data user provided is consistent */
    left = xs;
    for (i=rank; i<size; i++) {
      left += lc[i];
    }
    if (left != M) {
      SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Sum of lc across processors not equal to M %d %d",left,M);
    }
  }

  /* From now on x,s,xs,xe,Xs,Xe are the exact location in the array */
  x  *= dof;
  s  *= dof;  /* NOTE: here change s to be absolute stencil distance */
  xs *= dof;
  xe  = xs + x;

  /* determine ghost region */
  if (wrap == DA_XPERIODIC) {
    Xs = xs - s;
    Xe = xe + s;
  } else {
    if ((xs-s) >= 0)     Xs = xs-s; else Xs = 0;
    if ((xe+s) <= M*dof) Xe = xe+s; else Xe = M*dof;
  }

  /* allocate the base parallel and sequential vectors */
  VecCreateMPI(comm,x,PETSC_DECIDE,&global);
  VecSetBlockSize(global,dof);
  VecCreateSeq(PETSC_COMM_SELF,(Xe-Xs),&local);
  VecSetBlockSize(local,dof);

  /* Create Local to Global Vector Scatter Context */
  /* local to global inserts non-ghost point region into global */
  VecGetOwnershipRange(global,&start,&end);
  ISCreateStride(comm,x,start,1,&to);
  ISCreateStride(comm,x,xs-Xs,1,&from);
  VecScatterCreate(local,from,global,to,&ltog);
  PetscLogObjectParent(da,to);
  PetscLogObjectParent(da,from);
  PetscLogObjectParent(da,ltog);
  ISDestroy(from);
  ISDestroy(to);

  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  ISCreateStride(comm,(Xe-Xs),0,1,&to);

  PetscMalloc((x+2*s)*sizeof(int),&idx);
  PetscLogObjectMemory(da,(x+2*s)*sizeof(int));

  nn = 0;
  if (wrap == DA_XPERIODIC) {    /* Handle all cases with wrap first */

    for (i=0; i<s; i++) {  /* Left ghost points */
      if ((xs-s+i)>=0) { idx[nn++] = xs-s+i; }
      else             { idx[nn++] = M*dof+(xs-s+i); }
    }

    for (i=0; i<x; i++) { idx[nn++] = xs + i; }  /* Non-ghost points */

    for (i=0; i<s; i++) {  /* Right ghost points */
      if ((xe+i)<M*dof) { idx[nn++] = xe+i; }
      else              { idx[nn++] = (xe+i) - M*dof; }
    }
  } else {                       /* Now do all cases with no wrapping */

    if (s <= xs) { for (i=0; i<s; i++)  { idx[nn++] = xs - s + i; } }
    else         { for (i=0; i<xs; i++) { idx[nn++] = i; } }

    for (i=0; i<x; i++) { idx[nn++] = xs + i; }

    if ((xe+s)<=M*dof) { for (i=0; i<s; i++)        { idx[nn++] = xe+i; } }
    else               { for (i=xe; i<(M*dof); i++) { idx[nn++] = i; } }
  }
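
  /* As a concrete check of the periodic branch above: with M = 6, dof = 1,
     s = 2 and rank 0 owning nodes 0..2 (xs = 0, xe = 3), the ghosted
     local-to-global map assembled in idx[] is {4,5,0,1,2,3,4}: the two
     left ghost points wrap around to global nodes 4 and 5. */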

  ISCreateGeneral(comm,nn,idx,&from);
  VecScatterCreate(global,from,local,to,&gtol);
  PetscLogObjectParent(da,to);
  PetscLogObjectParent(da,from);
  PetscLogObjectParent(da,gtol);
  ISDestroy(to);
  ISDestroy(from);
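
  /*
     These two scatter contexts back the public interface: ltog is what
     DALocalToGlobal() uses to insert the owned (non-ghost) part of a local
     vector into the global vector, and gtol is what DAGlobalToLocalBegin()
     and DAGlobalToLocalEnd() use to gather ghost points, e.g. (sketch):

         DAGlobalToLocalBegin(da,global,INSERT_VALUES,local);
         DAGlobalToLocalEnd(da,global,INSERT_VALUES,local);
  */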

  da->M  = M;  da->N  = 1;  da->m  = m;   da->n  = 1;
  da->xs = xs; da->xe = xe; da->ys = 0;   da->ye = 1; da->zs = 0; da->ze = 1;
  da->Xs = Xs; da->Xe = Xe; da->Ys = 0;   da->Ye = 1; da->Zs = 0; da->Ze = 1;
  da->P  = 1;  da->p  = 1;  da->w  = dof; da->s  = s/dof;

  PetscLogObjectParent(da,global);
  PetscLogObjectParent(da,local);

  da->global       = global;
  da->local        = local;
  da->gtol         = gtol;
  da->ltog         = ltog;
  da->idx          = idx;
  da->Nl           = nn;
  da->base         = xs;
  da->ops->view    = DAView_1d;
  da->wrap         = wrap;
  da->stencil_type = DA_STENCIL_STAR;

  /*
     Set the local to global ordering in the global vector, this allows use
     of VecSetValuesLocal().
  */
  ISLocalToGlobalMappingCreate(comm,nn,idx,&da->ltogmap);
  VecSetLocalToGlobalMapping(da->global,da->ltogmap);
  ISLocalToGlobalMappingBlock(da->ltogmap,da->w,&da->ltogmapb);
  VecSetLocalToGlobalMappingBlock(da->global,da->ltogmapb);
  PetscLogObjectParent(da,da->ltogmap);
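
  /*
     With the mapping attached, entries of the global vector can be set
     using local (ghosted) index numbering; a minimal sketch, assuming one
     degree of freedom:

         PetscScalar one = 1.0;
         int         li  = 0;    the first local point, a ghost on interior processes
         VecSetValuesLocal(global,1,&li,&one,ADD_VALUES);
  */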

  /* construct the local to local scatter context */
  /*
      We simply remap the values in the from part of
      global to local to read from an array with the ghost values
      rather than from the plain array.
  */
  VecScatterCopy(gtol,&da->ltol);
  PetscLogObjectParent(da,da->ltol);
  left = xs - Xs;
  PetscMalloc((Xe-Xs)*sizeof(int),&idx);
  for (j=0; j<Xe-Xs; j++) {
    idx[j] = left + j;
  }
  VecScatterRemap(da->ltol,idx,PETSC_NULL);
  PetscFree(idx);
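
  /* Here left = xs - Xs is the offset of the owned region inside the ghost
     region: the stencil width times dof on an interior process, 0 on the
     leftmost process in the nonperiodic case. After the remap the copied
     scatter reads its source values from the ghosted local array rather
     than from the global vector. */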

  /*
     Build the natural ordering to PETSc ordering mappings.
  */
  PetscOptionsHasName(PETSC_NULL,"-da_noao",&flg1);
  if (!flg1) {
    IS is;

    ISCreateStride(comm,da->xe-da->xs,da->base,1,&is);
    AOCreateBasicIS(is,is,&da->ao);
    PetscLogObjectParent(da,da->ao);
    ISDestroy(is);
  } else {
    da->ao = PETSC_NULL;
  }
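
  /* Note that for a 1d DA the natural and PETSc orderings coincide (the
     same stride IS is passed for both sides of AOCreateBasicIS above), so
     this AO is the identity; it is built anyway, presumably so that callers
     of DAGetAO() get a valid object in every dimension. */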

  PetscOptionsHasName(PETSC_NULL,"-da_view",&flg1);
  if (flg1) {DAView(da,PETSC_VIEWER_STDOUT_(da->comm));}
  PetscOptionsHasName(PETSC_NULL,"-da_view_draw",&flg1);
  if (flg1) {DAView(da,PETSC_VIEWER_DRAW_(da->comm));}
  PetscOptionsHasName(PETSC_NULL,"-help",&flg1);
  if (flg1) {DAPrintHelp(da);}
  *inra = da;
  PetscPublishAll(da);
#if defined(PETSC_HAVE_AMS)
  PetscObjectComposeFunctionDynamic((PetscObject)global,"AMSSetFieldBlock_C",
                                    "AMSSetFieldBlock_DA",AMSSetFieldBlock_DA);
  PetscObjectComposeFunctionDynamic((PetscObject)local,"AMSSetFieldBlock_C",
                                    "AMSSetFieldBlock_DA",AMSSetFieldBlock_DA);
  if (((PetscObject)global)->amem > -1) {
    AMSSetFieldBlock_DA(((PetscObject)global)->amem,"values",global);
  }
#endif
  VecSetOperation(global,VECOP_VIEW,(void(*)(void))VecView_MPI_DA);
  VecSetOperation(global,VECOP_LOADINTOVECTOR,(void(*)(void))VecLoadIntoVector_Binary_DA);
  return(0);
}