Actual source code: ex11.c
petsc-dev 2014-02-02

static char help[] = "Solves the 2-dim Bratu (SFI - solid fuel ignition) test problem, where\n\
analytic formulation of the Jacobian is the default.\n\
\n\
Solves the linear systems via 2 level additive Schwarz\n\
\n\
The command line options are:\n\
  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
     problem SFI: <parameter> = Bratu parameter (0 <= par <= 6.81)\n\
  -Mx <xg>, where <xg> = number of grid points in the x-direction on coarse grid\n\
  -My <yg>, where <yg> = number of grid points in the y-direction on coarse grid\n\n";
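
/*
   Example usage (a sketch; the first options are described above, and any
   standard PETSc options, e.g. -snes_monitor, may be appended):

       mpiexec -n 4 ./ex11 -Mx 5 -My 5 -ratio 2 -par 6.0 -redundant_build
*/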

/*
    1) Solid Fuel Ignition (SFI) problem. This problem is modeled by
    the partial differential equation

            -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,

    with boundary conditions

             u = 0  for  x = 0, x = 1, y = 0, y = 1.

    A finite difference approximation with the usual 5-point stencil
    is used to discretize the boundary value problem to obtain a nonlinear
    system of equations.

    The code has two cases for the multilevel solver:
      I.  the coarse grid Jacobian is computed in parallel
      II. the coarse grid Jacobian is computed sequentially on each processor
    In both cases the coarse problem is SOLVED redundantly.
*/
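
/*
   For reference, FormFunction() below evaluates the resulting discrete
   equations at an interior node (i,j) as

      F_ij = (2u_ij - u_{i-1,j} - u_{i+1,j})*(hy/hx)
           + (2u_ij - u_{i,j-1} - u_{i,j+1})*(hx/hy)
           - hx*hy*lambda*exp(u_ij),

   i.e. the 5-point Laplacian scaled by hx*hy, while boundary rows reduce to
   scaled diagonal entries that enforce u = 0.
*/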

#include <petscsnes.h>
#include <petscdmda.h>

/* User-defined application contexts */

typedef struct {
  PetscInt mx,my;          /* number of grid points in x and y directions */
  Vec      localX,localF;  /* local vectors with ghost region */
  DM       da;
  Vec      x,b,r;          /* global vectors */
  Mat      J;              /* Jacobian on grid */
} GridCtx;

typedef struct {
  PetscReal  param;           /* test problem parameter */
  GridCtx    fine;
  GridCtx    coarse;
  KSP        ksp_coarse;
  KSP        ksp_fine;
  PetscInt   ratio;
  Mat        R;               /* restriction fine to coarse */
  Vec        Rscale;
  PetscBool  redundant_build; /* build coarse matrix redundantly */
  Vec        localall;        /* contains entire coarse vector on each processor in NATURAL order */
  VecScatter tolocalall;      /* maps from parallel "global" coarse vector to localall */
  VecScatter fromlocalall;    /* maps from localall vector back to global coarse vector */
} AppCtx;

#define COARSE_LEVEL 0
#define FINE_LEVEL   1
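
/* PCMG numbers its levels from 0 (coarsest) through nlevels-1 (finest), so
   with two levels the coarse grid is level 0 and the fine grid is level 1 */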

extern PetscErrorCode FormFunction(SNES,Vec,Vec,void*), FormInitialGuess1(AppCtx*,Vec);
extern PetscErrorCode FormJacobian(SNES,Vec,Mat*,Mat*,MatStructure*,void*);
extern PetscErrorCode FormInterpolation(AppCtx*);

/*
      ratio - ratio of grid lines between fine and coarse grids.
*/

int main(int argc,char **argv)
{
  SNES        snes;
  AppCtx      user;
  PetscInt    its,N,n,Nx = PETSC_DECIDE,Ny = PETSC_DECIDE,nlocal,Nlocal;
  PetscMPIInt size;
  PetscReal   bratu_lambda_max = 6.81,bratu_lambda_min = 0.;
  KSP         ksp;
  PC          pc;

  /*
      Initialize PETSc; note that default options in the file ex11options
      can be overridden at the command line
  */
  PetscInitialize(&argc,&argv,"ex11options",help);

  user.ratio     = 2;
  user.coarse.mx = 5; user.coarse.my = 5; user.param = 6.0;
  PetscOptionsGetInt(NULL,"-Mx",&user.coarse.mx,NULL);
  PetscOptionsGetInt(NULL,"-My",&user.coarse.my,NULL);
  PetscOptionsGetInt(NULL,"-ratio",&user.ratio,NULL);
  user.fine.mx = user.ratio*(user.coarse.mx-1)+1; user.fine.my = user.ratio*(user.coarse.my-1)+1;

  PetscOptionsHasName(NULL,"-redundant_build",&user.redundant_build);
  if (user.redundant_build) {
    PetscPrintf(PETSC_COMM_WORLD,"Building coarse Jacobian redundantly\n");
  }

  PetscPrintf(PETSC_COMM_WORLD,"Coarse grid size %D by %D\n",user.coarse.mx,user.coarse.my);
  PetscPrintf(PETSC_COMM_WORLD,"Fine grid size %D by %D\n",user.fine.mx,user.fine.my);

  PetscOptionsGetReal(NULL,"-par",&user.param,NULL);
  if (user.param >= bratu_lambda_max || user.param < bratu_lambda_min) SETERRQ(PETSC_COMM_SELF,1,"Lambda is out of range");
  n = user.fine.mx*user.fine.my; N = user.coarse.mx*user.coarse.my;

  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(NULL,"-Nx",&Nx,NULL);
  PetscOptionsGetInt(NULL,"-Ny",&Ny,NULL);

  /* Set up distributed array for fine grid */
  DMDACreate2d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,
               user.fine.my,Nx,Ny,1,1,NULL,NULL,&user.fine.da);
  DMCreateGlobalVector(user.fine.da,&user.fine.x);
  VecDuplicate(user.fine.x,&user.fine.r);
  VecDuplicate(user.fine.x,&user.fine.b);
  VecGetLocalSize(user.fine.x,&nlocal);
  DMCreateLocalVector(user.fine.da,&user.fine.localX);
  VecDuplicate(user.fine.localX,&user.fine.localF);
  MatCreateAIJ(PETSC_COMM_WORLD,nlocal,nlocal,n,n,5,NULL,3,NULL,&user.fine.J);

  /* Set up distributed array for coarse grid */
  DMDACreate2d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,
               user.coarse.my,Nx,Ny,1,1,NULL,NULL,&user.coarse.da);
  DMCreateGlobalVector(user.coarse.da,&user.coarse.x);
  VecDuplicate(user.coarse.x,&user.coarse.b);
  if (user.redundant_build) {
    /* Create scatter from parallel global numbering to redundant with natural ordering */
    DMDAGlobalToNaturalAllCreate(user.coarse.da,&user.tolocalall);
    DMDANaturalAllToGlobalCreate(user.coarse.da,&user.fromlocalall);
    VecCreateSeq(PETSC_COMM_SELF,N,&user.localall);
    /* Create sequential matrix to hold entire coarse grid Jacobian on each processor */
    MatCreateSeqAIJ(PETSC_COMM_SELF,N,N,5,NULL,&user.coarse.J);
  } else {
    VecGetLocalSize(user.coarse.x,&Nlocal);
    DMCreateLocalVector(user.coarse.da,&user.coarse.localX);
    VecDuplicate(user.coarse.localX,&user.coarse.localF);
    /* We will compute the coarse Jacobian in parallel */
    MatCreateAIJ(PETSC_COMM_WORLD,Nlocal,Nlocal,N,N,5,NULL,3,NULL,&user.coarse.J);
  }

  /* Create nonlinear solver */
  SNESCreate(PETSC_COMM_WORLD,&snes);

  /* provide user function and Jacobian */
  SNESSetFunction(snes,user.fine.b,FormFunction,&user);
  SNESSetJacobian(snes,user.fine.J,user.fine.J,FormJacobian,&user);

  /* set two level additive Schwarz preconditioner */
  SNESGetKSP(snes,&ksp);
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCMG);
  PCMGSetLevels(pc,2,NULL);
  PCMGSetType(pc,PC_MG_ADDITIVE);

  /* always solve the coarse problem redundantly with direct LU solver */
  PetscOptionsSetValue("-coarse_pc_type","redundant");
  PetscOptionsSetValue("-coarse_redundant_pc_type","lu");
  PetscOptionsSetValue("-coarse_redundant_ksp_type","preonly");
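
  /* Setting these database entries programmatically has the same effect as
     passing -coarse_pc_type redundant -coarse_redundant_pc_type lu
     -coarse_redundant_ksp_type preonly on the command line; the "coarse_"
     prefix is attached to the coarse-level KSP below */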

  /* Create coarse level */
  PCMGGetCoarseSolve(pc,&user.ksp_coarse);
  KSPSetOptionsPrefix(user.ksp_coarse,"coarse_");
  KSPSetFromOptions(user.ksp_coarse);
  KSPSetOperators(user.ksp_coarse,user.coarse.J,user.coarse.J,DIFFERENT_NONZERO_PATTERN);
  PCMGSetX(pc,COARSE_LEVEL,user.coarse.x);
  PCMGSetRhs(pc,COARSE_LEVEL,user.coarse.b);
  if (user.redundant_build) {
    PC rpc;
    KSPGetPC(user.ksp_coarse,&rpc);
    PCRedundantSetScatter(rpc,user.tolocalall,user.fromlocalall);
  }
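
  /* When building redundantly, the redundant PC is handed our own scatters
     so that the sequential coarse system it solves is numbered in the same
     NATURAL ordering that FormJacobian_Coarse() uses to assemble it */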

  /* Create fine level */
  PCMGGetSmoother(pc,FINE_LEVEL,&user.ksp_fine);
  KSPSetOptionsPrefix(user.ksp_fine,"fine_");
  KSPSetFromOptions(user.ksp_fine);
  KSPSetOperators(user.ksp_fine,user.fine.J,user.fine.J,DIFFERENT_NONZERO_PATTERN);
  PCMGSetR(pc,FINE_LEVEL,user.fine.r);
  PCMGSetResidual(pc,FINE_LEVEL,NULL,user.fine.J);

  /* Create interpolation between the levels */
  FormInterpolation(&user);
  PCMGSetInterpolation(pc,FINE_LEVEL,user.R);
  PCMGSetRestriction(pc,FINE_LEVEL,user.R);
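
  /* The same rectangular matrix R (fine-grid rows by coarse-grid columns,
     built in FormInterpolation()) is registered for both interpolation and
     restriction; PCMG applies its transpose for the restriction, as the
     vector sizes require */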

  /* Set options, then solve nonlinear system */
  SNESSetFromOptions(snes);
  FormInitialGuess1(&user,user.fine.x);
  SNESSolve(snes,NULL,user.fine.x);
  SNESGetIterationNumber(snes,&its);
  PetscPrintf(PETSC_COMM_WORLD,"Number of SNES iterations = %D\n",its);

  /* Free data structures */
  if (user.redundant_build) {
    VecScatterDestroy(&user.tolocalall);
    VecScatterDestroy(&user.fromlocalall);
    VecDestroy(&user.localall);
  } else {
    VecDestroy(&user.coarse.localX);
    VecDestroy(&user.coarse.localF);
  }

  MatDestroy(&user.fine.J);
  VecDestroy(&user.fine.x);
  VecDestroy(&user.fine.r);
  VecDestroy(&user.fine.b);
  DMDestroy(&user.fine.da);
  VecDestroy(&user.fine.localX);
  VecDestroy(&user.fine.localF);

  MatDestroy(&user.coarse.J);
  VecDestroy(&user.coarse.x);
  VecDestroy(&user.coarse.b);
  DMDestroy(&user.coarse.da);

  SNESDestroy(&snes);
  MatDestroy(&user.R);
  VecDestroy(&user.Rscale);
  PetscFinalize();
  return 0;
}

/* -------------------- Form initial approximation ----------------- */
PetscErrorCode FormInitialGuess1(AppCtx *user,Vec X)
{
  PetscInt    i,j,row,mx,my,xs,ys,xm,ym,Xm,Ym,Xs,Ys;
  PetscReal   one = 1.0,lambda,temp1,temp,hx,hy;
  PetscScalar *x;
  Vec         localX = user->fine.localX;

  mx = user->fine.mx; my = user->fine.my; lambda = user->param;
  hx = one/(PetscReal)(mx-1); hy = one/(PetscReal)(my-1);
  /* sc = hx*hy*lambda; hxdhy = hx/hy; hydhx = hy/hx; */

  temp1 = lambda/(lambda + one);

  /* Get ghost points */
  DMDAGetCorners(user->fine.da,&xs,&ys,0,&xm,&ym,0);
  DMDAGetGhostCorners(user->fine.da,&Xs,&Ys,0,&Xm,&Ym,0);
  VecGetArray(localX,&x);

  /* Compute initial guess */
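  /* The guess is u0(x,y) = (lambda/(lambda+1))*sqrt(min(d(x),d(y))), where
     d(.) is the distance to the nearest boundary in each coordinate, and
     u0 = 0 on the boundary itself */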
  for (j=ys; j<ys+ym; j++) {
    temp = (PetscReal)(PetscMin(j,my-j-1))*hy;
    for (i=xs; i<xs+xm; i++) {
      row = i - Xs + (j - Ys)*Xm;
      if (i == 0 || j == 0 || i == mx-1 || j == my-1) {
        x[row] = 0.0;
        continue;
      }
      x[row] = temp1*PetscSqrtReal(PetscMin((PetscReal)(PetscMin(i,mx-i-1))*hx,temp));
    }
  }
  VecRestoreArray(localX,&x);

  /* Insert values into global vector */
  DMLocalToGlobalBegin(user->fine.da,localX,INSERT_VALUES,X);
  DMLocalToGlobalEnd(user->fine.da,localX,INSERT_VALUES,X);
  return 0;
}

/* -------------------- Evaluate Function F(x) --------------------- */
PetscErrorCode FormFunction(SNES snes,Vec X,Vec F,void *ptr)
{
  AppCtx      *user = (AppCtx*)ptr;
  PetscInt    i,j,row,mx,my,xs,ys,xm,ym,Xs,Ys,Xm,Ym;
  PetscReal   two = 2.0,one = 1.0,lambda,hx,hy,hxdhy,hydhx,sc;
  PetscScalar u,uxx,uyy,*x,*f;
  Vec         localX = user->fine.localX,localF = user->fine.localF;

  mx = user->fine.mx; my = user->fine.my; lambda = user->param;
  hx = one/(PetscReal)(mx-1); hy = one/(PetscReal)(my-1);
  sc = hx*hy*lambda; hxdhy = hx/hy; hydhx = hy/hx;

  /* Get ghost points */
  DMGlobalToLocalBegin(user->fine.da,X,INSERT_VALUES,localX);
  DMGlobalToLocalEnd(user->fine.da,X,INSERT_VALUES,localX);
  DMDAGetCorners(user->fine.da,&xs,&ys,0,&xm,&ym,0);
  DMDAGetGhostCorners(user->fine.da,&Xs,&Ys,0,&Xm,&Ym,0);
  VecGetArray(localX,&x);
  VecGetArray(localF,&f);

  /* Evaluate function */
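  /* The ghosted local array is an Xm-by-Ym block starting at (Xs,Ys), so the
     entry for grid node (i,j) lives at index (j-Ys)*Xm + (i-Xs); the loop
     keeps a running "row" to avoid recomputing this */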
  for (j=ys; j<ys+ym; j++) {
    row = (j - Ys)*Xm + xs - Xs - 1;
    for (i=xs; i<xs+xm; i++) {
      row++;
      if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
        u      = x[row];
        uxx    = (two*u - x[row-1] - x[row+1])*hydhx;
        uyy    = (two*u - x[row-Xm] - x[row+Xm])*hxdhy;
        f[row] = uxx + uyy - sc*PetscExpScalar(u);
      } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)) {
        f[row] = .5*two*(hydhx + hxdhy)*x[row];
      } else {
        f[row] = .25*two*(hydhx + hxdhy)*x[row];
      }
    }
  }
  VecRestoreArray(localX,&x);
  VecRestoreArray(localF,&f);

  /* Insert values into global vector */
  DMLocalToGlobalBegin(user->fine.da,localF,INSERT_VALUES,F);
  DMLocalToGlobalEnd(user->fine.da,localF,INSERT_VALUES,F);
  PetscLogFlops(11.0*ym*xm);
  return 0;
}

/*
   Computes the part of the Jacobian associated with this processor
*/
PetscErrorCode FormJacobian_Grid(AppCtx *user,GridCtx *grid,Vec X,Mat *J,Mat *B)
{
  Mat            jac = *J;
  PetscInt       i,j,row,mx,my,xs,ys,xm,ym,Xs,Ys,Xm,Ym,col[5],nloc,grow;
  const PetscInt *ltog;
  PetscScalar    two = 2.0,one = 1.0,lambda,v[5],hx,hy,hxdhy,hydhx,sc,*x,value;
  Vec            localX = grid->localX;

  mx = grid->mx; my = grid->my; lambda = user->param;
  hx = one/(PetscReal)(mx-1); hy = one/(PetscReal)(my-1);
  sc = hx*hy; hxdhy = hx/hy; hydhx = hy/hx;

  /* Get ghost points */
  DMGlobalToLocalBegin(grid->da,X,INSERT_VALUES,localX);
  DMGlobalToLocalEnd(grid->da,X,INSERT_VALUES,localX);
  DMDAGetCorners(grid->da,&xs,&ys,0,&xm,&ym,0);
  DMDAGetGhostCorners(grid->da,&Xs,&Ys,0,&Xm,&Ym,0);
  DMDAGetGlobalIndices(grid->da,&nloc,&ltog);
  VecGetArray(localX,&x);

  /* Evaluate Jacobian of function */
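  /* "row" indexes the ghosted local array exactly as in FormFunction();
     ltog[] maps each local (ghosted) index to the PETSc global numbering
     that MatSetValues() expects */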
  for (j=ys; j<ys+ym; j++) {
    row = (j - Ys)*Xm + xs - Xs - 1;
    for (i=xs; i<xs+xm; i++) {
      row++;
      grow = ltog[row];
      if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
        v[0] = -hxdhy; col[0] = ltog[row - Xm];
        v[1] = -hydhx; col[1] = ltog[row - 1];
        v[2] = two*(hydhx + hxdhy) - sc*lambda*PetscExpScalar(x[row]); col[2] = grow;
        v[3] = -hydhx; col[3] = ltog[row + 1];
        v[4] = -hxdhy; col[4] = ltog[row + Xm];
        MatSetValues(jac,1,&grow,5,col,v,INSERT_VALUES);
      } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)) {
        value = .5*two*(hydhx + hxdhy);
        MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
      } else {
        value = .25*two*(hydhx + hxdhy);
        MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
      }
    }
  }
  DMDARestoreGlobalIndices(grid->da,&nloc,&ltog);
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  VecRestoreArray(localX,&x);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
  return 0;
}

/*
   Computes the ENTIRE Jacobian associated with the ENTIRE grid sequentially.
   This is for generating the coarse grid redundantly.

   This is BAD code duplication, since the bulk of this routine is the
   same as the routine above.

   Note the numbering of the rows/columns is the NATURAL numbering
*/
PetscErrorCode FormJacobian_Coarse(AppCtx *user,GridCtx *grid,Vec X,Mat *J,Mat *B)
{
  Mat         jac = *J;
  PetscInt    i,j,row,mx,my,col[5];
  PetscScalar two = 2.0,one = 1.0,lambda,v[5],hx,hy,hxdhy,hydhx,sc,*x,value;

  mx = grid->mx; my = grid->my; lambda = user->param;
  hx = one/(PetscReal)(mx-1); hy = one/(PetscReal)(my-1);
  sc = hx*hy; hxdhy = hx/hy; hydhx = hy/hx;

  VecGetArray(X,&x);

  /* Evaluate Jacobian of function */
  for (j=0; j<my; j++) {
    row = j*mx - 1;
    for (i=0; i<mx; i++) {
      row++;
      if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
        v[0] = -hxdhy; col[0] = row - mx;
        v[1] = -hydhx; col[1] = row - 1;
        v[2] = two*(hydhx + hxdhy) - sc*lambda*PetscExpScalar(x[row]); col[2] = row;
        v[3] = -hydhx; col[3] = row + 1;
        v[4] = -hxdhy; col[4] = row + mx;
        MatSetValues(jac,1,&row,5,col,v,INSERT_VALUES);
      } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)) {
        value = .5*two*(hydhx + hxdhy);
        MatSetValues(jac,1,&row,1,&row,&value,INSERT_VALUES);
      } else {
        value = .25*two*(hydhx + hxdhy);
        MatSetValues(jac,1,&row,1,&row,&value,INSERT_VALUES);
      }
    }
  }
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  VecRestoreArray(X,&x);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
  return 0;
}

/* -------------------- Evaluate Jacobian F'(x) --------------------- */
PetscErrorCode FormJacobian(SNES snes,Vec X,Mat *J,Mat *B,MatStructure *flag,void *ptr)
{
  AppCtx    *user = (AppCtx*)ptr;
  KSP       ksp;
  PC        pc;
  PetscBool ismg;

  *flag = SAME_NONZERO_PATTERN;
  FormJacobian_Grid(user,&user->fine,X,J,B);

  /* create coarse grid Jacobian for preconditioner */
  SNESGetKSP(snes,&ksp);
  KSPGetPC(ksp,&pc);

  PetscObjectTypeCompare((PetscObject)pc,PCMG,&ismg);
  if (ismg) {
    KSPSetOperators(user->ksp_fine,user->fine.J,user->fine.J,SAME_NONZERO_PATTERN);

    /* restrict X to coarse grid; R is the interpolation matrix, so its
       transpose maps fine vectors to coarse, and Rscale renormalizes the result */
    MatMultTranspose(user->R,X,user->coarse.x);
    VecPointwiseMult(user->coarse.x,user->coarse.x,user->Rscale);

    /* form Jacobian on coarse grid */
    if (user->redundant_build) {
      /* get copy of coarse X onto each processor */
      VecScatterBegin(user->tolocalall,user->coarse.x,user->localall,INSERT_VALUES,SCATTER_FORWARD);
      VecScatterEnd(user->tolocalall,user->coarse.x,user->localall,INSERT_VALUES,SCATTER_FORWARD);
      FormJacobian_Coarse(user,&user->coarse,user->localall,&user->coarse.J,&user->coarse.J);
    } else {
      /* coarse grid Jacobian computed in parallel */
      FormJacobian_Grid(user,&user->coarse,user->coarse.x,&user->coarse.J,&user->coarse.J);
    }
    KSPSetOperators(user->ksp_coarse,user->coarse.J,user->coarse.J,SAME_NONZERO_PATTERN);
  }
  return 0;
}

/*
   Forms the interpolation (and restriction) operator from the
   coarse grid to the fine grid.
*/
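
/*
   Each fine grid node receives standard bilinear interpolation from the (up
   to) four surrounding coarse grid nodes: with local offsets x,y in [0,1),
   the weights set below are (1-x)(1-y), x(1-y), (1-x)y, and x*y.
*/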
PetscErrorCode FormInterpolation(AppCtx *user)
{
  PetscInt       i,j,i_start,m_fine,j_start,m,n;
  const PetscInt *idx,*idx_c;
  PetscInt       m_ghost,n_ghost,m_ghost_c,n_ghost_c,m_coarse;
  PetscInt       row,i_start_ghost,j_start_ghost,cols[4],m_c;
  PetscInt       nc,ratio = user->ratio,m_c_local,m_fine_local;
  PetscInt       i_c,j_c,i_start_c,j_start_c,n_c,i_start_ghost_c,j_start_ghost_c,col;
  PetscScalar    v[4],x,y,one = 1.0;
  Mat            mat;
  Vec            Rscale;

  DMDAGetCorners(user->fine.da,&i_start,&j_start,0,&m,&n,0);
  DMDAGetGhostCorners(user->fine.da,&i_start_ghost,&j_start_ghost,0,&m_ghost,&n_ghost,0);
  DMDAGetGlobalIndices(user->fine.da,NULL,&idx);

  DMDAGetCorners(user->coarse.da,&i_start_c,&j_start_c,0,&m_c,&n_c,0);
  DMDAGetGhostCorners(user->coarse.da,&i_start_ghost_c,&j_start_ghost_c,0,&m_ghost_c,&n_ghost_c,0);
  DMDAGetGlobalIndices(user->coarse.da,NULL,&idx_c);

  /* create interpolation matrix */
  VecGetLocalSize(user->fine.x,&m_fine_local);
  VecGetLocalSize(user->coarse.x,&m_c_local);
  VecGetSize(user->fine.x,&m_fine);
  VecGetSize(user->coarse.x,&m_coarse);
  MatCreateAIJ(PETSC_COMM_WORLD,m_fine_local,m_c_local,m_fine,m_coarse,5,NULL,3,NULL,&mat);

  /* loop over local fine grid nodes, setting the interpolation for each */
  for (j=j_start; j<j_start+n; j++) {
    for (i=i_start; i<i_start+m; i++) {
      /* convert to local "natural" numbering and then to PETSc global numbering */
      row = idx[m_ghost*(j-j_start_ghost) + (i-i_start_ghost)];

      i_c = (i/ratio);    /* coarse grid node to left of fine grid node */
      j_c = (j/ratio);    /* coarse grid node below fine grid node */

      /*
         Only include those interpolation points that are truly nonzero.
         Note this is very important for the final grid lines in the x and y
         directions, since they have no right/top neighbors.
      */
      x  = ((PetscReal)(i - i_c*ratio))/((PetscReal)ratio);
      y  = ((PetscReal)(j - j_c*ratio))/((PetscReal)ratio);
      nc = 0;
      /* one left and below; or we are right on it */
      if (j_c < j_start_ghost_c || j_c > j_start_ghost_c+n_ghost_c) SETERRQ3(PETSC_COMM_SELF,1,"Sorry j %D %D %D",j_c,j_start_ghost_c,j_start_ghost_c+n_ghost_c);
      if (i_c < i_start_ghost_c || i_c > i_start_ghost_c+m_ghost_c) SETERRQ3(PETSC_COMM_SELF,1,"Sorry i %D %D %D",i_c,i_start_ghost_c,i_start_ghost_c+m_ghost_c);
      col      = m_ghost_c*(j_c-j_start_ghost_c) + (i_c-i_start_ghost_c);
      cols[nc] = idx_c[col];
      v[nc++]  = x*y - x - y + 1.0;
      /* one right and below */
      if (i_c*ratio != i) {
        cols[nc] = idx_c[col+1];
        v[nc++]  = -x*y + x;
      }
      /* one left and above */
      if (j_c*ratio != j) {
        cols[nc] = idx_c[col+m_ghost_c];
        v[nc++]  = -x*y + y;
      }
      /* one right and above */
      if (j_c*ratio != j && i_c*ratio != i) {
        cols[nc] = idx_c[col+m_ghost_c+1];
        v[nc++]  = x*y;
      }
      MatSetValues(mat,1,&row,nc,cols,v,INSERT_VALUES);
    }
  }
  DMDARestoreGlobalIndices(user->fine.da,NULL,&idx);
  DMDARestoreGlobalIndices(user->coarse.da,NULL,&idx_c);
  MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
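
  /* Build Rscale: applying R^T to a vector of ones gives, at each coarse
     node, the sum of the interpolation weights touching it; the reciprocal
     of that vector scales the restriction R^T so that restricting a
     constant vector reproduces the same constant */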
  VecDuplicate(user->coarse.x,&Rscale);
  VecSet(user->fine.x,one);
  MatMultTranspose(mat,user->fine.x,Rscale);
  VecReciprocal(Rscale);
  user->Rscale = Rscale;
  user->R      = mat;
  return 0;
}