ex5.c (petsc-dev 2014-02-02)
static char help[] = "Demonstrates Pattern Formation with Reaction-Diffusion Equations.\n";

/*
   Page 21, Pattern Formation with Reaction-Diffusion Equations

      u_t = D1 (u_xx + u_yy) - u*v^2 + gamma*(1 - u)
      v_t = D2 (v_xx + v_yy) + u*v^2 - (gamma + kappa)*v

   Unlike in the book, this example uses periodic boundary conditions
   instead of Neumann (since they are easier for finite differences).
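
   The Laplacians are discretized with the standard 5-point stencil on a
   uniform Mx by My periodic grid with spacings hx = 2.5/Mx and hy = 2.5/My
   (the domain is [0,2.5] x [0,2.5]); for example, at grid point (i,j),

      u_xx ~ (u[i-1][j] - 2*u[i][j] + u[i+1][j])/hx^2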
*/

/*
   Helpful runtime monitor options:
      -ts_monitor_draw_solution
      -draw_save -draw_save_movie

   Helpful runtime linear solver options:
      -pc_type mg -pc_mg_galerkin -da_refine 1 -snes_monitor -ksp_monitor -ts_view
      (note that these Jacobians are so well conditioned that multigrid may not be the best solver)

   Point your browser to localhost:8080 to monitor the simulation:
      ./ex5 -ts_view_pre saws -stack_view saws -draw_save -draw_save_single_file -x_virtual -ts_monitor_draw_solution -saws_root .
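
   A typical parallel run (an illustrative command, not from the original
   source) might look like:
      mpiexec -n 4 ./ex5 -da_refine 2 -ts_max_steps 100 -ts_monitor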
*/

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use TS solvers.  Note that this
   file automatically includes:
      petscsys.h    - base PETSc routines     petscvec.h  - vectors
      petscmat.h    - matrices
      petscis.h     - index sets              petscksp.h  - Krylov subspace methods
      petscviewer.h - viewers                 petscpc.h   - preconditioners
      petscksp.h    - linear solvers          petscsnes.h - nonlinear solvers
*/

#include <petscdmda.h>
#include <petscts.h>

typedef struct {
  PetscScalar u,v;
} Field;

typedef struct {
  PetscReal D1,D2,gamma,kappa;
} AppCtx;

/*
   User-defined routines
*/
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*),InitialConditions(DM,Vec);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat*,Mat*,MatStructure*,void*);

int main(int argc,char **argv)
{
  TS     ts;       /* ODE integrator */
  Vec    x;        /* solution */
  DM     da;
  AppCtx appctx;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  PetscInitialize(&argc,&argv,(char*)0,help);
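
  /* For brevity this example omits error checking; production PETSc code
     of this era would wrap every call as, e.g.,
         ierr = PetscInitialize(&argc,&argv,(char*)0,help);CHKERRQ(ierr);
     with a PetscErrorCode ierr declared in each function. */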

  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
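  /* the negative grid sizes (-65) request a 65 x 65 grid by default while
     allowing the sizes to be overridden at runtime with -da_grid_x and
     -da_grid_y */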
  DMDACreate2d(PETSC_COMM_WORLD,DMDA_BOUNDARY_PERIODIC,DMDA_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,-65,-65,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);
  DMDASetFieldName(da,0,"u");
  DMDASetFieldName(da,1,"v");

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create a global vector from the DMDA to hold the solution
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMCreateGlobalVector(da,&x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSCreate(PETSC_COMM_WORLD,&ts);
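  /* Use an additive Runge-Kutta IMEX integrator, but run it fully
     implicitly: since the whole right-hand side (diffusion and reaction)
     is supplied as one RHS function below, all of it is handled by the
     implicit part of the scheme.  The type can still be changed at
     runtime with -ts_type. */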
  TSSetType(ts,TSARKIMEX);
  TSARKIMEXSetFullyImplicit(ts,PETSC_TRUE);
  TSSetDM(ts,da);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);
  TSSetRHSJacobian(ts,NULL,NULL,RHSJacobian,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  InitialConditions(da,x);
  TSSetSolution(ts,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
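  /* integrate to t = 2000 (with the default limit on the number of steps),
     starting at t = 0 with an initial step of 1e-4; any of these settings
     can be overridden from the command line via TSSetFromOptions() */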
  TSSetDuration(ts,PETSC_DEFAULT,2000.0);
  TSSetInitialTimeStep(ts,0.0,.0001);
  TSSetFromOptions(ts);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSSolve(ts,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  VecDestroy(&x);
  TSDestroy(&ts);
  DMDestroy(&da);

  PetscFinalize();
  return(0);
}

/* ------------------------------------------------------------------- */
/*
   RHSFunction - Evaluates the nonlinear right-hand-side function F(t,U).

   Input Parameters:
.  ts - the TS context
.  ftime - the current time
.  U - input vector
.  ptr - optional user-defined context, as set by TSSetRHSFunction()

   Output Parameter:
.  F - function vector
*/
PetscErrorCode RHSFunction(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr)
{
  AppCtx      *appctx = (AppCtx*)ptr;
  DM          da;
  PetscInt    i,j,Mx,My,xs,ys,xm,ym;
  PetscReal   hx,hy,sx,sy;
  PetscScalar uc,uxx,uyy,vc,vxx,vyy;
  Field       **u,**f;
  Vec         localU;

  TSGetDM(ts,&da);
  DMGetLocalVector(da,&localU);
  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 2.50/(PetscReal)(Mx); sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)(My); sy = 1.0/(hy*hy);

  /*
     Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit.
  */
  DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);
  DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);

  /*
     Get pointers to vector data
  */
  DMDAVecGetArray(da,localU,&u);
  DMDAVecGetArray(da,F,&f);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

  /*
     Compute function over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    for (i=xs; i<xs+xm; i++) {
      uc        = u[j][i].u;
      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
      uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
      vc        = u[j][i].v;
      vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
      vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
      f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);
      f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc;
    }
  }
  PetscLogFlops(16*xm*ym);

  /*
     Restore vectors
  */
  DMDAVecRestoreArray(da,localU,&u);
  DMDAVecRestoreArray(da,F,&f);
  DMRestoreLocalVector(da,&localU);
  return(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da,Vec U)
{
  PetscInt  i,j,xs,ys,xm,ym,Mx,My;
  Field     **u;
  PetscReal hx,hy,x,y;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 2.5/(PetscReal)(Mx);
  hy = 2.5/(PetscReal)(My);

  /*
     Get pointers to vector data
  */
  DMDAVecGetArray(da,U,&u);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);

  /*
     Compute initial conditions over the locally owned part of the grid
  */
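  /* Start from the uniform steady state (u,v) = (1,0), perturbed by a
     smooth sin^2 bump in v on the square [1,1.5] x [1,1.5]; u is set to
     1 - 2v so the perturbation shows up in both components. */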
  for (j=ys; j<ys+ym; j++) {
    y = j*hy;
    for (i=xs; i<xs+xm; i++) {
      x = i*hx;
      if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0);
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0*u[j][i].v;
    }
  }

  /*
     Restore vectors
  */
  DMDAVecRestoreArray(da,U,&u);
  return(0);
}

/* ------------------------------------------------------------------- */
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec U,Mat *AA,Mat *BB,MatStructure *str,void *ctx)
{
  Mat         A       = *AA;           /* Jacobian matrix */
  AppCtx      *appctx = (AppCtx*)ctx;  /* user-defined application context */
  DM          da;
  PetscInt    i,j,Mx,My,xs,ys,xm,ym;
  PetscReal   hx,hy,sx,sy;
  PetscScalar uc,vc;
  Field       **u;
  Vec         localU;
  MatStencil  stencil[6],rowstencil;
  PetscScalar entries[6];

  TSGetDM(ts,&da);
  DMGetLocalVector(da,&localU);
  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 2.50/(PetscReal)(Mx); sx = 1.0/(hx*hx);
  hy = 2.50/(PetscReal)(My); sy = 1.0/(hy*hy);

  /*
     Scatter ghost points to local vector, using the 2-step process
     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transit.
  */
  DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);
  DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);

  /*
     Get pointers to vector data
  */
  DMDAVecGetArray(da,localU,&u);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
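
  /* Each Jacobian row couples one unknown (component c at grid point (i,j))
     to six entries: the four Laplacian neighbors, the diagonal, and the
     other component at the same point.  MatStencil addresses matrix entries
     by grid index (k,j,i) and component c; neighbor indices such as i-1 or
     j+1 that fall outside the domain are translated through the DMDA's
     ghost mapping, which wraps them for periodic boundaries. */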
  stencil[0].k = 0;
  stencil[1].k = 0;
  stencil[2].k = 0;
  stencil[3].k = 0;
  stencil[4].k = 0;
  stencil[5].k = 0;
  rowstencil.k = 0;

  /*
     Compute the Jacobian over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    stencil[0].j = j-1;
    stencil[1].j = j+1;
    stencil[2].j = j;
    stencil[3].j = j;
    stencil[4].j = j;
    stencil[5].j = j;
    rowstencil.j = j;
    for (i=xs; i<xs+xm; i++) {
      uc = u[j][i].u;
      vc = u[j][i].v;

      /* uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
         uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
         vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
         vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
         f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc); */
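
      /* Derivatives of the (commented) u residual above with respect to
         each unknown it touches:
            d f_u / d u(i,j)     = -2*D1*(sx+sy) - vc*vc - gamma   (diagonal)
            d f_u / d u(i+/-1,j) = D1*sx
            d f_u / d u(i,j+/-1) = D1*sy
            d f_u / d v(i,j)     = -2*uc*vc
         These are exactly the six entries inserted next; the v row below
         follows the same pattern with D2 and the v reaction terms. */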
      stencil[0].i = i;   stencil[0].c = 0; entries[0] = appctx->D1*sy;
      stencil[1].i = i;   stencil[1].c = 0; entries[1] = appctx->D1*sy;
      stencil[2].i = i-1; stencil[2].c = 0; entries[2] = appctx->D1*sx;
      stencil[3].i = i+1; stencil[3].c = 0; entries[3] = appctx->D1*sx;
      stencil[4].i = i;   stencil[4].c = 0; entries[4] = -2.0*appctx->D1*(sx + sy) - vc*vc - appctx->gamma;
      stencil[5].i = i;   stencil[5].c = 1; entries[5] = -2.0*uc*vc;
      rowstencil.i = i;   rowstencil.c = 0;

      MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);

      stencil[0].c = 1; entries[0] = appctx->D2*sy;
      stencil[1].c = 1; entries[1] = appctx->D2*sy;
      stencil[2].c = 1; entries[2] = appctx->D2*sx;
      stencil[3].c = 1; entries[3] = appctx->D2*sx;
      stencil[4].c = 1; entries[4] = -2.0*appctx->D2*(sx + sy) + 2.0*uc*vc - appctx->gamma - appctx->kappa;
      stencil[5].c = 0; entries[5] = vc*vc;
      rowstencil.c = 1;

      MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);
      /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
    }
  }

  PetscLogFlops(19*xm*ym);

  /*
     Restore vectors
  */
  DMDAVecRestoreArray(da,localU,&u);
  DMRestoreLocalVector(da,&localU);

  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
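
  /* Tell the caller that the nonzero pattern is unchanged between
     evaluations, and flag insertion into any new nonzero location as an
     error now that the pattern is fixed. */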
  *str = SAME_NONZERO_PATTERN;
  MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
  return(0);
}