Actual source code: da1.c
/*$Id: da1.c,v 1.129 2001/09/07 20:12:17 bsmith Exp $*/

/*
   Code for manipulating distributed regular 1d arrays in parallel.
   This file was created by Peter Mell 6/30/95
*/
#include "src/dm/da/daimpl.h"
#if defined(PETSC_HAVE_AMS)
EXTERN_C_BEGIN
EXTERN int AMSSetFieldBlock_DA(AMS_Memory,char *,Vec);
EXTERN_C_END
#endif
int DAView_1d(DA da,PetscViewer viewer)
{
  int        rank,ierr;
  PetscTruth isascii,isdraw,isbinary;

  MPI_Comm_rank(da->comm,&rank);

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
  if (isascii) {
    PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %d m %d w %d s %d\n",rank,da->M,
                                       da->m,da->w,da->s);
    PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %d %d\n",da->xs,da->xe);
    PetscViewerFlush(viewer);
  } else if (isdraw) {
    PetscDraw  draw;
    double     ymin = -1,ymax = 1,xmin = -1,xmax = da->M,x;
    int        base;
    char       node[10];
    PetscTruth isnull;

    PetscViewerDrawGetDraw(viewer,0,&draw);
    PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

    PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
    PetscDrawSynchronizedClear(draw);

    /* first processor draws all node lines */
    if (!rank) {
      int xmin_tmp;
      ymin = 0.0; ymax = 0.3;

      /* ADIC doesn't like doubles in a for loop */
      for (xmin_tmp = 0; xmin_tmp < (int)da->M; xmin_tmp++) {
        PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);
      }

      xmin = 0.0; xmax = da->M - 1;
      PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
      PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);
    }

    PetscDrawSynchronizedFlush(draw);
    PetscDrawPause(draw);

    /* draw my box */
    ymin = 0; ymax = 0.3; xmin = da->xs / da->w; xmax = (da->xe / da->w) - 1;
    PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);

    /* put in index numbers */
    base = da->base / da->w;
    for (x=xmin; x<=xmax; x++) {
      sprintf(node,"%d",base++);
      PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);
    }

    PetscDrawSynchronizedFlush(draw);
    PetscDrawPause(draw);
  } else if (isbinary) {
    DAView_Binary(da,viewer);
  } else {
    SETERRQ1(1,"Viewer type %s not supported for DA 1d",((PetscObject)viewer)->type_name);
  }
  return(0);
}
EXTERN int DAPublish_Petsc(PetscObject);
/*@C
   DACreate1d - Creates an object that will manage the communication of one-dimensional
   regular array data that is distributed across some processors.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  wrap - type of periodicity the array should have, if any. Use
          either DA_NONPERIODIC or DA_XPERIODIC
.  M - global dimension of the array
.  dof - number of degrees of freedom per node
.  lc - array containing the number of nodes in the X direction on each processor,
        or PETSC_NULL. If non-null, it must be of length m (the number of processors)
-  s - stencil width

   Output Parameter:
.  inra - the resulting distributed array object

   Options Database Key:
+  -da_view - calls DAView() at the conclusion of DACreate1d()
.  -da_grid_x <nx> - number of grid points in x direction; honored only if M < 0
-  -da_noao - do not compute the natural-to-PETSc ordering object

   Level: beginner

   Notes:
   If you are having problems with running out of memory, run with the option -da_noao.

   The array data itself is NOT stored in the DA; it is stored in Vec objects.
   The appropriate vector objects can be obtained with calls to DACreateGlobalVector()
   and DACreateLocalVector(), and calls to VecDuplicate() if more are needed.

.keywords: distributed array, create, one-dimensional

.seealso: DADestroy(), DAView(), DACreate2d(), DACreate3d(), DAGlobalToLocalBegin(),
          DAGlobalToLocalEnd(), DALocalToGlobal(), DALocalToLocalBegin(), DALocalToLocalEnd(),
          DAGetInfo(), DACreateGlobalVector(), DACreateLocalVector(), DACreateNaturalVector(), DALoad(), DAView()

@*/
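/*
   A minimal usage sketch of the routine below (an illustrative note, not
   part of the original source; error checking omitted, and it assumes the
   PETSc 2.x API used throughout this file):

      DA  da;
      Vec g,l;

      DACreate1d(PETSC_COMM_WORLD,DA_NONPERIODIC,100,1,1,PETSC_NULL,&da);
      DACreateGlobalVector(da,&g);
      DACreateLocalVector(da,&l);
      ...
      VecDestroy(g); VecDestroy(l); DADestroy(da);
*/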
int DACreate1d(MPI_Comm comm,DAPeriodicType wrap,int M,int dof,int s,int *lc,DA *inra)
{
  int        rank,size,xs,xe,x,Xs,Xe,ierr,start,end,m;
  int        i,*idx,nn,j,left,gdim,refine_x = 2,tM = M;
  PetscTruth flg1,flg2;
  DA         da;
  Vec        local,global;
  VecScatter ltog,gtol;
  IS         to,from;

  *inra = 0;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  DMInitializePackage(PETSC_NULL);
#endif

  if (dof < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %d",dof);
  if (s < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %d",s);

  PetscOptionsBegin(comm,PETSC_NULL,"1d DA Options","DA");
  if (M < 0) {
    tM = -M;
    PetscOptionsInt("-da_grid_x","Number of grid points in x direction","DACreate1d",tM,&tM,PETSC_NULL);
  }
  PetscOptionsInt("-da_refine_x","Refinement ratio in x direction","DACreate1d",refine_x,&refine_x,PETSC_NULL);
  PetscOptionsEnd();
  M = tM;
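  /*
     Example of the negative-M convention above (an illustrative note, not
     part of the original source): a caller passing M = -128 gets a default
     of 128 grid points, which the user may then override at run time with
     something like

         mpirun -np 4 ./myapp -da_grid_x 256
  */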
  PetscHeaderCreate(da,_p_DA,struct _DAOps,DA_COOKIE,0,"DA",comm,DADestroy,DAView);
  PetscLogObjectCreate(da);
  da->bops->publish           = DAPublish_Petsc;
  da->ops->createglobalvector = DACreateGlobalVector;
  da->ops->getinterpolation   = DAGetInterpolation;
  da->ops->getcoloring        = DAGetColoring;
  da->ops->getmatrix          = DAGetMatrix;
  da->ops->refine             = DARefine;
  PetscLogObjectMemory(da,sizeof(struct _p_DA));
  da->dim        = 1;
  da->interptype = DA_Q1;
  da->gtog1      = 0;
  da->refine_x   = refine_x;
  PetscMalloc(dof*sizeof(char*),&da->fieldname);
  PetscMemzero(da->fieldname,dof*sizeof(char*));
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);

  m = size;

  if (M < m) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"More processors than data points! %d %d",m,M);
  if ((M-1) < s) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %d %d",M-1,s);
  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lc) {
    PetscOptionsHasName(PETSC_NULL,"-da_partition_blockcomm",&flg1);
    PetscOptionsHasName(PETSC_NULL,"-da_partition_nodes_at_end",&flg2);
    if (flg1) {        /* block comm type distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* the leftover nodes are evenly distributed across the last processors */
      x = (M + rank)/m;
      if (M/m == x) { xs = rank*x; }
      else          { xs = rank*(x-1) + (M+rank)%(x*m); }
    } else {           /* the leftover nodes are evenly distributed across the first M % m processors */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) {xs = (rank * (int)(M/m) + M % m);}
      else                 {xs = rank * (int)(M/m) + rank;}
    }
  } else {
    x  = lc[rank];
    xs = 0;
    for (i=0; i<rank; i++) {
      xs += lc[i];
    }
    /* verify that data user provided is consistent */
    left = xs;
    for (i=rank; i<size; i++) {
      left += lc[i];
    }
    if (left != M) {
      SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Sum of lc across processors not equal to M %d %d",left,M);
    }
  }
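  /*
     Worked example of the default distribution above (an illustrative note,
     not part of the original source): with M = 10 nodes on m = 3 processors,
     M/m = 3 and M % m = 1, so rank 0 owns x = 4 nodes starting at xs = 0,
     while ranks 1 and 2 each own x = 3 nodes starting at xs = 4 and xs = 7.
  */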
  /* From now on x,s,xs,xe,Xs,Xe are the exact location in the array */
  x  *= dof;
  s  *= dof;  /* NOTE: here change s to be absolute stencil distance */
  xs *= dof;
  xe  = xs + x;

  /* determine ghost region */
  if (wrap == DA_XPERIODIC) {
    Xs = xs - s;
    Xe = xe + s;
  } else {
    if ((xs-s) >= 0)     Xs = xs-s; else Xs = 0;
    if ((xe+s) <= M*dof) Xe = xe+s; else Xe = M*dof;
  }
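  /*
     Ghost-region example (an illustrative note, not part of the original
     source): continuing with M = 10, dof = 1, s = 1 and no wrap, rank 0 has
     xs = 0, xe = 4, so its ghosted region is clamped to Xs = 0, Xe = 5; an
     interior rank simply gets Xs = xs-1 and Xe = xe+1.
  */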
  /* allocate the base parallel and sequential vectors */
  VecCreateMPI(comm,x,PETSC_DECIDE,&global);
  VecSetBlockSize(global,dof);
  VecCreateSeq(PETSC_COMM_SELF,(Xe-Xs),&local);
  VecSetBlockSize(local,dof);

  /* Create Local to Global Vector Scatter Context */
  /* local to global inserts non-ghost point region into global */
  VecGetOwnershipRange(global,&start,&end);
  ISCreateStride(comm,x,start,1,&to);
  ISCreateStride(comm,x,xs-Xs,1,&from);
  VecScatterCreate(local,from,global,to,&ltog);
  PetscLogObjectParent(da,to);
  PetscLogObjectParent(da,from);
  PetscLogObjectParent(da,ltog);
  ISDestroy(from);
  ISDestroy(to);
  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  ISCreateStride(comm,(Xe-Xs),0,1,&to);

  PetscMalloc((x+2*s)*sizeof(int),&idx);
  PetscLogObjectMemory(da,(x+2*s)*sizeof(int));

  nn = 0;
  if (wrap == DA_XPERIODIC) { /* Handle all cases with wrap first */

    for (i=0; i<s; i++) {  /* Left ghost points */
      if ((xs-s+i) >= 0) { idx[nn++] = xs-s+i; }
      else               { idx[nn++] = M*dof+(xs-s+i); }
    }

    for (i=0; i<x; i++) { idx[nn++] = xs + i; }  /* Non-ghost points */

    for (i=0; i<s; i++) {  /* Right ghost points */
      if ((xe+i) < M*dof) { idx[nn++] = xe+i; }
      else                { idx[nn++] = (xe+i) - M*dof; }
    }
  } else { /* Now do all cases with no wrapping */

    if (s <= xs) { for (i=0; i<s; i++)  { idx[nn++] = xs - s + i; } }
    else         { for (i=0; i<xs; i++) { idx[nn++] = i; } }

    for (i=0; i<x; i++) { idx[nn++] = xs + i; }

    if ((xe+s) <= M*dof) { for (i=0; i<s; i++)        { idx[nn++] = xe+i; } }
    else                 { for (i=xe; i<(M*dof); i++) { idx[nn++] = i; } }
  }
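  /*
     Index-array example (an illustrative note, not part of the original
     source): with M = 10, dof = 1, s = 2 and DA_XPERIODIC, rank 0 owning
     nodes 0..3 wraps its left ghosts around the end of the array, so the
     ghosted index list becomes idx = {8,9, 0,1,2,3, 4,5}.
  */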
  ISCreateGeneral(comm,nn,idx,&from);
  VecScatterCreate(global,from,local,to,&gtol);
  PetscLogObjectParent(da,to);
  PetscLogObjectParent(da,from);
  PetscLogObjectParent(da,gtol);
  ISDestroy(to);
  ISDestroy(from);
  da->M  = M;  da->N  = 1;  da->m  = m;   da->n = 1;
  da->xs = xs; da->xe = xe; da->ys = 0;   da->ye = 1; da->zs = 0; da->ze = 1;
  da->Xs = Xs; da->Xe = Xe; da->Ys = 0;   da->Ye = 1; da->Zs = 0; da->Ze = 1;
  da->P  = 1;  da->p  = 1;  da->w = dof;  da->s = s/dof;

  PetscLogObjectParent(da,global);
  PetscLogObjectParent(da,local);

  da->global       = global;
  da->local        = local;
  da->gtol         = gtol;
  da->ltog         = ltog;
  da->idx          = idx;
  da->Nl           = nn;
  da->base         = xs;
  da->ops->view    = DAView_1d;
  da->wrap         = wrap;
  da->stencil_type = DA_STENCIL_STAR;
  /*
     Set the local to global ordering in the global vector; this allows the
     use of VecSetValuesLocal().
  */
  ISLocalToGlobalMappingCreate(comm,nn,idx,&da->ltogmap);
  VecSetLocalToGlobalMapping(da->global,da->ltogmap);
  ISLocalToGlobalMappingBlock(da->ltogmap,da->w,&da->ltogmapb);
  VecSetLocalToGlobalMappingBlock(da->global,da->ltogmapb);
  PetscLogObjectParent(da,da->ltogmap);
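  /*
     With the mapping installed, callers can address entries by local
     (ghosted) index; a hedged sketch (an illustrative note, not part of the
     original source; error checking omitted):

         PetscScalar v  = 1.0;
         int         li = 0;    local index of the first ghosted entry
         VecSetValuesLocal(global,1,&li,&v,INSERT_VALUES);
  */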
  /* construct the local to local scatter context */
  /*
     We simply remap the values in the from part of
     global to local to read from an array with the ghost values
     rather than from the plain array.
  */
  VecScatterCopy(gtol,&da->ltol);
  PetscLogObjectParent(da,da->ltol);
  left = xs - Xs;
  PetscMalloc((Xe-Xs)*sizeof(int),&idx);
  for (j=0; j<Xe-Xs; j++) {
    idx[j] = left + j;
  }
  VecScatterRemap(da->ltol,idx,PETSC_NULL);
  PetscFree(idx);
  /*
     Build the natural ordering to PETSc ordering mappings.
  */
  PetscOptionsHasName(PETSC_NULL,"-da_noao",&flg1);
  if (!flg1) {
    IS is;

    ISCreateStride(comm,da->xe-da->xs,da->base,1,&is);
    AOCreateBasicIS(is,is,&da->ao);
    PetscLogObjectParent(da,da->ao);
    ISDestroy(is);
  } else {
    da->ao = PETSC_NULL;
  }
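  /*
     The AO built above translates between the natural (lexicographic)
     ordering and PETSc's per-processor contiguous ordering; for a 1d DA the
     two orderings coincide, so the map is the identity. A hedged usage
     sketch (an illustrative note, not part of the original source):

         int indices[1] = {5};                  a natural-ordering index
         AOApplicationToPetsc(da->ao,1,indices);
         indices[0] now holds the PETSc-ordering equivalent
  */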
  /*
     Note: the following will be removed soon, since the functionality
     is replaced by the AO created above.
  */
  /* Construct the mapping from the current global ordering to the global
     ordering that would be used if only 1 processor were employed.
     This mapping is intended only for internal use by discrete
     function and matrix viewers.

     We don't really need this for 1D distributed arrays, since the
     ordering is the same regardless. But for now we form it anyway.
     Maybe we'll change this in the near future.
  */
  VecGetSize(global,&gdim);
  PetscMalloc(gdim*sizeof(int),&da->gtog1);
  PetscLogObjectMemory(da,gdim*sizeof(int));
  for (i=0; i<gdim; i++) da->gtog1[i] = i;
  PetscOptionsHasName(PETSC_NULL,"-da_view",&flg1);
  if (flg1) {DAView(da,PETSC_VIEWER_STDOUT_(da->comm));}
  PetscOptionsHasName(PETSC_NULL,"-da_view_draw",&flg1);
  if (flg1) {DAView(da,PETSC_VIEWER_DRAW_(da->comm));}
  PetscOptionsHasName(PETSC_NULL,"-help",&flg1);
  if (flg1) {DAPrintHelp(da);}
  *inra = da;
  PetscPublishAll(da);
#if defined(PETSC_HAVE_AMS)
  PetscObjectComposeFunctionDynamic((PetscObject)global,"AMSSetFieldBlock_C",
                                    "AMSSetFieldBlock_DA",AMSSetFieldBlock_DA);
  PetscObjectComposeFunctionDynamic((PetscObject)local,"AMSSetFieldBlock_C",
                                    "AMSSetFieldBlock_DA",AMSSetFieldBlock_DA);
  if (((PetscObject)global)->amem > -1) {
    AMSSetFieldBlock_DA(((PetscObject)global)->amem,"values",global);
  }
#endif
  VecSetOperation(global,VECOP_VIEW,(void(*)(void))VecView_MPI_DA);
  VecSetOperation(global,VECOP_LOADINTOVECTOR,(void(*)(void))VecLoadIntoVector_Binary_DA);
  return(0);
}