Actual source code: ex4.c
petsc-dev 2014-02-02
static char help[] = "Tests various 2-dimensional DMDA routines.\n\n";

#include <petscdmda.h>
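
/*
   Illustrative command lines (not part of the original source; the option
   names are taken from the PetscOptionsGetInt/Bool calls below, and the
   launcher name depends on the local MPI installation):

     mpiexec -n 4 ./ex4 -NX 20 -NY 16 -s 1 -star
     mpiexec -n 4 ./ex4 -m 2 -n 2 -distribute -testorder
*/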

int main(int argc,char **argv)
{
  PetscMPIInt      rank;
  PetscErrorCode   ierr;
  PetscInt         M = 10,N = 8,m = PETSC_DECIDE;
  PetscInt         s = 2,w = 2,n = PETSC_DECIDE,nloc,l,i,j,kk;
  PetscInt         Xs,Xm,Ys,Ym,iloc,*iglobal;
  const PetscInt   *ltog;
  PetscInt         *lx = NULL,*ly = NULL;
  PetscBool        testorder = PETSC_FALSE,flg;
  DMDABoundaryType bx = DMDA_BOUNDARY_NONE,by = DMDA_BOUNDARY_NONE;
  DM               da;
  PetscViewer      viewer;
  Vec              local,global;
  PetscScalar      value;
  DMDAStencilType  st = DMDA_STENCIL_BOX;
  AO               ao;

  PetscInitialize(&argc,&argv,(char*)0,help);
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,400,&viewer);

  /* Read options */
  PetscOptionsGetInt(NULL,"-NX",&M,NULL);
  PetscOptionsGetInt(NULL,"-NY",&N,NULL);
  PetscOptionsGetInt(NULL,"-m",&m,NULL);
  PetscOptionsGetInt(NULL,"-n",&n,NULL);
  PetscOptionsGetInt(NULL,"-s",&s,NULL);
  PetscOptionsGetInt(NULL,"-w",&w,NULL);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,"-xperiodic",&flg,NULL); if (flg) bx = DMDA_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,"-yperiodic",&flg,NULL); if (flg) by = DMDA_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,"-xghosted",&flg,NULL); if (flg) bx = DMDA_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,"-yghosted",&flg,NULL); if (flg) by = DMDA_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,"-star",&flg,NULL); if (flg) st = DMDA_STENCIL_STAR;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,"-box",&flg,NULL); if (flg) st = DMDA_STENCIL_BOX;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,"-testorder",&testorder,NULL);

  /*
      Test putting two nodes in x and y on each processor, except the last
      processor in x and y, which gets the rest.
  */
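  /*
      Worked example (illustrative, assuming the default M=10, N=8): with
      -m 3 -n 3 -distribute, which requires m*n = 9 MPI processes, the code
      below produces lx = {4,4,2} and ly = {2,2,4}, i.e. fixed-width slabs
      everywhere except the last process in each direction, which absorbs
      the remainder.
  */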
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,"-distribute",&flg,NULL);
  if (flg) {
    if (m == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,1,"Must set -m option with -distribute option");
    PetscMalloc1(m,&lx);
    for (i=0; i<m-1; i++) { lx[i] = 4;}
    lx[m-1] = M - 4*(m-1);
    if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,1,"Must set -n option with -distribute option");
    PetscMalloc1(n,&ly);
    for (i=0; i<n-1; i++) { ly[i] = 2;}
    ly[n-1] = N - 2*(n-1);
  }

  /* Create distributed array and get vectors */
  DMDACreate2d(PETSC_COMM_WORLD,bx,by,st,M,N,m,n,w,s,lx,ly,&da);
  PetscFree(lx);
  PetscFree(ly);

  DMView(da,viewer);
  DMCreateGlobalVector(da,&global);
  DMCreateLocalVector(da,&local);
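
  /*
     Background note (not in the original source): the global vector has one
     entry per degree of freedom owned by this process and no ghost points,
     while the local vector is a sequential vector sized for the owned points
     plus the ghost points implied by the stencil width s and stencil type st.
  */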

  /* Set global vector; send ghost points to local vectors */
  value = 1;
  VecSet(global,value);
  DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
  DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);
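
  /* The Begin/End pair is split so that the communication of ghost values can
     be overlapped with local computation placed between the two calls. */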

  /* Scale local vectors according to processor rank; pass to global vector */
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  value = rank;
  VecScale(local,value);
  DMLocalToGlobalBegin(da,local,INSERT_VALUES,global);
  DMLocalToGlobalEnd(da,local,INSERT_VALUES,global);
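
  /* Background note (assumption about INSERT_VALUES semantics here): each
     process copies only its owned (non-ghost) local values into the global
     vector; the scaled ghost values are discarded rather than accumulated,
     as ADD_VALUES would do. */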

  if (!testorder) { /* turn off printing when testing ordering mappings */
    PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vectors:\n");
    VecView(global,PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(PETSC_COMM_WORLD,"\n\n");
  }

  /* Send ghost points to local vectors */
  DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
  DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,"-local_print",&flg,NULL);
  if (flg) {
    PetscViewer sviewer;

    PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD,PETSC_TRUE);
    PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank);
    PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);
    VecView(local,sviewer);
    PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);
  }
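
  /* Background note: PetscViewerGetSingleton() hands each process a sequential
     view of the shared stdout viewer, which is what allows the process-local
     (ghosted) vector, a sequential Vec, to be viewed here. */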

  /* Test mappings between application/PETSc orderings */
  if (testorder) {
    DMDAGetGhostCorners(da,&Xs,&Ys,NULL,&Xm,&Ym,NULL);
    DMDAGetGlobalIndices(da,&nloc,&ltog);
    DMDAGetAO(da,&ao);
    PetscMalloc1(nloc,&iglobal);
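
    /* Here Xs,Ys and Xm,Ym describe this process's patch of the grid including
       ghost points, and ltog[] maps each of the nloc local (owned plus ghost)
       degrees of freedom to its global index in the PETSc parallel ordering. */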

    /* Set iglobal to be global indices for each processor's local and ghost nodes,
       using the DMDA ordering of grid points */
    kk = 0;
    for (j=Ys; j<Ys+Ym; j++) {
      for (i=Xs; i<Xs+Xm; i++) {
        iloc = w*((j-Ys)*Xm + i-Xs);
        for (l=0; l<w; l++) {
          iglobal[kk++] = ltog[iloc+l];
        }
      }
    }

    /* Map this to the application ordering (which for DMDAs is just the natural ordering
       that would be used for 1 processor, numbering most rapidly by x, then y) */
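    /* For reference (derived from the comment above, not part of the original
       source): in that natural ordering, component l of grid point (i,j) has
       index w*(j*M + i) + l for this 2d DMDA. */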
    AOPetscToApplication(ao,nloc,iglobal);

    /* Then map the application ordering back to the PETSc DMDA ordering */
    AOApplicationToPetsc(ao,nloc,iglobal);

    /* Verify the mappings */
    kk = 0;
    for (j=Ys; j<Ys+Ym; j++) {
      for (i=Xs; i<Xs+Xm; i++) {
        iloc = w*((j-Ys)*Xm + i-Xs);
        for (l=0; l<w; l++) {
          if (iglobal[kk] != ltog[iloc+l]) {
            PetscFPrintf(PETSC_COMM_SELF,stdout,"[%d] Problem with mapping: j=%D, i=%D, l=%D, petsc1=%D, petsc2=%D\n",
                         rank,j,i,l,ltog[iloc+l],iglobal[kk]);
          }
          kk++;
        }
      }
    }
    PetscFree(iglobal);
    DMDARestoreGlobalIndices(da,&nloc,&ltog);
  }

  /* Free memory */
  PetscViewerDestroy(&viewer);
  VecDestroy(&local);
  VecDestroy(&global);
  DMDestroy(&da);

  PetscFinalize();
  return 0;
}