Actual source code: ex4.c
/* Program usage:  mpirun -np <procs> ex4 [-help] [all PETSc options] */

static char help[] = "Solves a simple time-dependent linear PDE (the heat equation).\n\
Input parameters include:\n\
  -m <points>, where <points> = number of grid points\n\
  -time_dependent_rhs : Treat the problem as having a time-dependent right-hand side\n\
  -debug              : Activate debugging printouts\n\
  -nox                : Deactivate x-window graphics\n\n";
/*
   Concepts: TS^time-dependent linear problems
   Concepts: TS^heat equation
   Concepts: TS^diffusion equation
   Processors: n
*/
/* ------------------------------------------------------------------------

   This program solves the one-dimensional heat equation (also called the
   diffusion equation),
       u_t = u_xx,
   on the domain 0 <= x <= 1, with the boundary conditions
       u(t,0) = 0, u(t,1) = 0,
   and the initial condition
       u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
   This is a linear, second-order, parabolic equation.

   We discretize the right-hand side using finite differences with
   uniform grid spacing h:
       u_xx = (u_{i+1} - 2*u_{i} + u_{i-1})/(h^2)
   We then demonstrate time evolution using the various TS methods by
   running the program via
       mpirun -np <procs> ex4 -ts_type <timestepping solver>

   We compare the approximate solution with the exact solution, given by
       u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
                      3*exp(-4*pi*pi*t) * sin(2*pi*x)

   Notes:
   This code demonstrates the TS solver interface to two variants of
   linear problems, u_t = f(u,t), namely
     - time-dependent f:   f(u,t) is a function of t
     - time-independent f: f(u,t) is simply f(u)

   The uniprocessor version of this code is ts/examples/tutorials/ex3.c

  ------------------------------------------------------------------------- */
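/*
   After this spatial discretization the PDE becomes the linear ODE system
       du/dt = A*u,
   where, for the interior grid points, A is tridiagonal with 1/h^2 on the
   off-diagonals and -2/h^2 on the diagonal; the two boundary rows are set
   separately.  This is exactly the matrix assembled in RHSMatrixHeat()
   below (sone = 1/h^2, stwo = -2/h^2).
*/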
/*
   Include "petscda.h" so that we can use distributed arrays (DAs) to manage
   the parallel grid.  Include "petscts.h" so that we can use TS solvers.
   Note that this file automatically includes:
     petsc.h       - base PETSc routines   petscvec.h  - vectors
     petscsys.h    - system routines       petscmat.h  - matrices
     petscis.h     - index sets            petscksp.h  - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h   - preconditioners
     petscksp.h    - linear solvers        petscsnes.h - nonlinear solvers
*/
#include "petscda.h"
#include "petscts.h"
/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/
typedef struct {
  MPI_Comm    comm;              /* communicator */
  DA          da;                /* distributed array data structure */
  Vec         localwork;         /* local ghosted work vector */
  Vec         u_local;           /* local ghosted approximate solution vector */
  Vec         solution;          /* global exact solution vector */
  PetscInt    m;                 /* total number of grid points */
  PetscReal   h;                 /* mesh width h = 1/(m-1) */
  PetscTruth  debug;             /* flag (1 indicates activation of debugging printouts) */
  PetscViewer viewer1,viewer2;   /* viewers for the solution and error */
  PetscReal   norm_2,norm_max;   /* error norms */
} AppCtx;
/*
   User-defined routines (defined below)
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSMatrixHeat(TS,PetscReal,Mat*,Mat*,MatStructure*,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
int main(int argc,char **argv)
{
  AppCtx        appctx;                 /* user-defined application context */
  TS            ts;                     /* timestepping context */
  Mat           A;                      /* matrix data structure */
  Vec           u;                      /* approximate solution vector */
  PetscReal     time_total_max = 100.0; /* default max total time */
  PetscInt      time_steps_max = 100;   /* default max timesteps */
  PetscDraw     draw;                   /* drawing context */
  PetscInt      steps,m;
  PetscMPIInt   size;
  PetscReal     dt,ftime;
  PetscTruth    flg;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc,&argv,(char*)0,help);
  appctx.comm = PETSC_COMM_WORLD;

  m    = 60;
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsHasName(PETSC_NULL,"-debug",&appctx.debug);
  appctx.m        = m;
  appctx.h        = 1.0/(m-1.0);
  appctx.norm_2   = 0.0;
  appctx.norm_max = 0.0;

  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscPrintf(PETSC_COMM_WORLD,"Solving a linear TS problem, number of processors = %d\n",size);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create distributed array (DA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  DACreate1d(PETSC_COMM_WORLD,DA_NONPERIODIC,m,1,1,PETSC_NULL,&appctx.da);
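  /*
     The DACreate1d() arguments are: the communicator, the (non-periodic)
     boundary type, the number of global grid points m, the number of degrees
     of freedom per grid point (1), the stencil (ghost point) width (1), and
     PETSC_NULL to let PETSc decide how many grid points each processor owns.
  */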
  /*
     Extract global and local vectors from DA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  DACreateGlobalVector(appctx.da,&u);
  DACreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set up displays to show graphs of the solution and error
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,380,400,160,&appctx.viewer1);
  PetscViewerDrawGetDraw(appctx.viewer1,0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,0,400,160,&appctx.viewer2);
  PetscViewerDrawGetDraw(appctx.viewer2,0,&draw);
  PetscDrawSetDoubleBuffer(draw);
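  /*
     Each PetscViewerDrawOpen() call creates an X window (display, title,
     x- and y-position in pixels, width, height); the -nox option suppresses
     this graphics output.  Double buffering makes the per-timestep redraws
     appear smoothly.
  */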
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD,&ts);
  TSSetProblemType(ts,TS_LINEAR);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetMonitor(ts,Monitor,&appctx,PETSC_NULL);
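  /*
     Monitor() will be called at each timestep (including step 0, the initial
     condition) with &appctx as its user context; the final PETSC_NULL
     argument is an optional routine for destroying that context, which is
     not needed here.
  */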
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m,m,&A);
  MatSetFromOptions(A);

  PetscOptionsHasName(PETSC_NULL,"-time_dependent_rhs",&flg);
  if (flg) {
    /*
       For linear problems with a time-dependent f(u,t) in the equation
       u_t = f(u,t), the user provides the discretized right-hand-side
       as a time-dependent matrix.
    */
    TSSetRHSMatrix(ts,A,A,RHSMatrixHeat,&appctx);
  } else {
    /*
       For linear problems with a time-independent f(u) in the equation
       u_t = f(u), the user provides the discretized right-hand-side
       as a matrix only once, and then sets a null matrix evaluation
       routine.
    */
    MatStructure A_structure;
    RHSMatrixHeat(ts,0.0,&A,&A,&A_structure,&appctx);
    TSSetRHSMatrix(ts,A,A,PETSC_NULL,&appctx);
  }
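  /*
     In both cases TSSetRHSMatrix() tells TS that the right-hand side of
     u_t = f(u,t) is the linear operator A (here also used as the
     preconditioning matrix); when a non-null evaluation routine is given,
     TS calls it to refresh the matrix as the time changes.
  */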
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
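  /*
     The initial timestep dt = h^2/2 corresponds to the classical stability
     limit of explicit (forward Euler) timestepping for this centered-difference
     discretization of the heat equation; implicit methods such as backward
     Euler can take much larger steps.
  */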
  dt = appctx.h*appctx.h/2.0;
  TSSetInitialTimeStep(ts,0.0,dt);
  TSSetSolution(ts,u);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set timestepping duration info.
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     overrides the defaults set by TSSetDuration(), and
          -ts_type <timestepping solver>
     selects the timestepping method (e.g., backward Euler).
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetDuration(ts,time_steps_max,time_total_max);
  TSSetFromOptions(ts);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSStep(ts,&steps,&ftime);
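  /*
     In this version of PETSc, TSStep() runs the entire timestepping loop:
     it returns the number of steps actually taken in "steps" and the final
     time reached in "ftime".
  */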
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     View timestepping solver info
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscPrintf(PETSC_COMM_WORLD,"avg. error (2 norm) = %g, avg. error (max norm) = %g\n",
              appctx.norm_2/steps,appctx.norm_max/steps);
  TSView(ts,PETSC_VIEWER_STDOUT_WORLD);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSDestroy(ts);
  MatDestroy(A);
  VecDestroy(u);
  PetscViewerDestroy(appctx.viewer1);
  PetscViewerDestroy(appctx.viewer2);
  VecDestroy(appctx.localwork);
  VecDestroy(appctx.solution);
  VecDestroy(appctx.u_local);
  DADestroy(appctx.da);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}
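/*
   Example invocations (assuming the standard TS runtime options are available
   in this PETSc build):
     mpirun -np 2 ex4 -ts_type beuler -ts_max_steps 50
     mpirun -np 4 ex4 -m 120 -time_dependent_rhs -debug -nox
*/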
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar *u_localptr,h = appctx->h;
  PetscInt    i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  VecGetArray(u,&u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
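  /*
     The array returned by VecGetArray() covers only the locally owned
     entries and is indexed from 0, so global grid point i maps to local
     position i-mybase; its physical coordinate is x = i*h, giving
     u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x) below.
  */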
  for (i=mybase; i<myend; i++) {
    u_localptr[i-mybase] = PetscSinScalar(PETSC_PI*i*6.*h) + 3.*PetscSinScalar(PETSC_PI*i*2.*h);
  }

  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
     PetscPrintf(appctx->comm,"initial guess vector\n");
     VecView(u,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar *s_localptr,h = appctx->h,ex1,ex2,sc1,sc2;
  PetscInt    i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  ex1 = exp(-36.*PETSC_PI*PETSC_PI*t); ex2 = exp(-4.*PETSC_PI*PETSC_PI*t);
  sc1 = PETSC_PI*6.*h;                 sc2 = PETSC_PI*2.*h;
  for (i=mybase; i<myend; i++) {
    s_localptr[i-mybase] = PetscSinScalar(sc1*(PetscReal)i)*ex1 + 3.*PetscSinScalar(sc2*(PetscReal)i)*ex2;
  }
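  /*
     This evaluates the exact solution from the header comment at x = i*h:
     ex1 and ex2 are the time-decay factors exp(-36*pi^2*t) and exp(-4*pi^2*t)
     of the two Fourier modes, and sc1*i = 6*pi*x, sc2*i = 2*pi*x.
  */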
  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx      *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal   norm_2,norm_max;
  PetscScalar mone = -1.0;

  /*
     View a graph of the current iterate
  */
  VecView(u,appctx->viewer2);

  /*
     Compute the exact solution
  */
  ExactSolution(time,appctx->solution,appctx);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
     PetscPrintf(appctx->comm,"Computed solution vector\n");
     VecView(u,PETSC_VIEWER_STDOUT_WORLD);
     PetscPrintf(appctx->comm,"Exact solution vector\n");
     VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  VecAXPY(&mone,u,appctx->solution);
  VecNorm(appctx->solution,NORM_2,&norm_2);
  norm_2 = sqrt(appctx->h)*norm_2;
  VecNorm(appctx->solution,NORM_MAX,&norm_max);
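  /*
     VecAXPY() here uses the older calling sequence y = y + alpha*x, so
     appctx->solution now holds the error (exact solution minus computed
     solution); the sqrt(h) factor scales the discrete 2-norm so that it
     approximates the continuous L2 norm of the error on [0,1].
  */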
  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscPrintf(appctx->comm,"Timestep %D: time = %g, 2-norm error = %g, max norm error = %g\n",
              step,time,norm_2,norm_max);
  appctx->norm_2   += norm_2;
  appctx->norm_max += norm_max;

  /*
     View a graph of the error
  */
  VecView(appctx->solution,appctx->viewer1);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
     PetscPrintf(appctx->comm,"Error vector\n");
     VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSMatrixHeat - User-provided routine to compute the right-hand-side
   matrix for the heat equation.

   Input Parameters:
   ts  - the TS context
   t   - current time
   ctx - optional user-defined context, as set by TSSetRHSMatrix()

   Output Parameters:
   AA  - Jacobian matrix
   BB  - optionally different preconditioning matrix
   str - flag indicating matrix structure

   Notes:
   RHSMatrixHeat computes entries for the locally owned part of the system.
    - Currently, all PETSc parallel matrix formats are partitioned by
      contiguous chunks of rows across the processors.
    - Each processor needs to insert only elements that it owns
      locally (but any non-local elements will be sent to the
      appropriate processor during matrix assembly).
    - Always specify global rows and columns of matrix entries when
      using MatSetValues(); we could alternatively use MatSetValuesLocal().
    - Here, we set all entries for a particular row at once.
    - Note that MatSetValues() uses 0-based row and column numbers
      in Fortran as well as in C.
*/
PetscErrorCode RHSMatrixHeat(TS ts,PetscReal t,Mat *AA,Mat *BB,MatStructure *str,void *ctx)
{
  Mat         A = *AA;                /* Jacobian matrix */
  AppCtx      *appctx = (AppCtx*)ctx; /* user-defined application context */
  PetscInt    i,mstart,mend,idx[3];
  PetscScalar v[3],stwo = -2./(appctx->h*appctx->h),sone = -.5*stwo;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the matrix
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatGetOwnershipRange(A,&mstart,&mend);

  /*
     Set matrix rows corresponding to boundary data
  */

  if (mstart == 0) {  /* first processor only */
    v[0] = 1.0;
    MatSetValues(A,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }

  if (mend == appctx->m) { /* last processor only */
    mend--;
    v[0] = 1.0;
    MatSetValues(A,1,&mend,1,&mend,v,INSERT_VALUES);
  }
  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
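  /*
     Each interior row i receives the stencil (sone, stwo, sone) =
     (1/h^2, -2/h^2, 1/h^2) in columns i-1, i, and i+1, i.e. the standard
     second-order centered-difference approximation of u_xx.
  */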
  v[0] = sone; v[1] = stwo; v[2] = sone;
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    MatSetValues(A,1,&i,3,idx,v,INSERT_VALUES);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  /*
     Set flag to indicate that the Jacobian matrix retains an identical
     nonzero structure throughout all timestepping iterations (although the
     values of the entries change).  Thus, we can save some work in setting
     up the preconditioner (e.g., no need to redo symbolic factorization for
     ILU/ICC preconditioners).
      - If the nonzero structure of the matrix is different during
        successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
        must be used instead.  If you are unsure whether the matrix
        structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
      - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
        believes your assertion and does not check the structure
        of the matrix.  If you erroneously claim that the structure
        is the same when it actually is not, the new preconditioner
        will not function correctly.  Thus, use this optimization
        feature with caution!
  */
  *str = SAME_NONZERO_PATTERN;
  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR);

  return 0;
}