Actual source code: ex2f.F

!
!/*T
!   Concepts: TS^time-dependent nonlinear problems
!   Processors: n
!T*/
!
!  ------------------------------------------------------------------------
!
!   This program solves a simple time-dependent nonlinear PDE using implicit
!   timestepping:
!                                    u * u_xx
!                              u_t = ---------
!                                    2*(t+1)^2
!
!             u(0,x) = 1 + x*x; u(t,0) = t + 1; u(t,1) = 2*t + 2
!
!   The exact solution is u(t,x) = (1 + x*x) * (1 + t).
!
!   Note that since the solution is linear in time and quadratic in x,
!   the finite difference scheme actually computes the "exact" solution.
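!   (To verify: u_t = 1 + x*x and u_xx = 2*(1 + t), so
!        u * u_xx / (2*(t+1)^2) = (1 + x*x)*(1 + t)*2*(1 + t) / (2*(t+1)^2)
!                               = 1 + x*x = u_t.)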
!
!   We use the backward Euler method.
!
!  --------------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Each routine within this program uses the include file 'ex2f.h',
!  which itself includes the various PETSc include files as well as
!  problem-specific data in several common blocks.
!
!  This program uses CPP for preprocessing, as indicated by the use of
!  PETSc include files in the directory petsc/include/finclude.  This
!  convention enables use of the CPP preprocessor, which allows the use
!  of the #include statements that define PETSc objects and variables.
!
!  Use of conventional Fortran include statements is also supported.
!  In this case, the PETSc include files are located in the directory
!  petsc/include/foldinclude.  A sketch of that style appears below.
!
!  Since one must be very careful to include each file no more than once
!  in a Fortran routine, application programmers must explicitly list
!  each file needed for the various PETSc components within their
!  program (unlike the C/C++ interface).
!
!  See the Fortran section of the PETSc users manual for details.
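!
!  For example, a routine using only vectors and matrices might begin
!  with (a sketch; the file names under foldinclude are assumed to
!  mirror those under finclude):
!
!         include 'include/foldinclude/petsc.h'
!         include 'include/foldinclude/petscvec.h'
!         include 'include/foldinclude/petscmat.h'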

#include "ex2f.h"

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ts             - timestepping solver
!     A              - Jacobian matrix context
!     u_local        - local vector
!     u              - approximate solution vector
!     ftime          - final time
!     time_total_max - default total max time
!     time_steps_max - default max timesteps
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers; i.e., vectors in PETSc are not just
!        double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.
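!
!  For example, the Fortran idiom for reading entry i of a vector x is
!  (a sketch of the pattern used throughout this file):
!         PetscScalar xx_v(1)
!         PetscOffset xx_i
!         call VecGetArray(x,xx_v,xx_i,ierr)
!         ...  use xx_v(xx_i + i) ...
!         call VecRestoreArray(x,xx_v,xx_i,ierr)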

      TS              ts
      Vec             u
      Mat             A
      PetscTruth      flg,monitor
      PetscErrorCode  ierr
      PetscInt        time_steps_max,steps,i1
      PetscReal       dt,ftime,time_total_max

!  Note: Any user-defined Fortran routines (such as RHSFunction)
!  MUST be declared as external.

      external MyMonitor,RHSFunction
      external InitialConditions,RHSJacobian

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      comm = PETSC_COMM_WORLD
      call MPI_Comm_size(comm,size,ierr)
      call MPI_Comm_rank(comm,rank,ierr)

!  Initialize problem parameters

      time_steps_max = 1000
      time_total_max = 100.0
      m              = 60
      debug          = 0
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-debug',debug,ierr)
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-monitor',monitor,       &
     &                         ierr)
      zero_d0 = 0.0d0
      one_d0  = 1.0d0
      two_d0  = 2.0d0
      four_d0 = 4.0d0
      h       = one_d0/(m-one_d0)
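!  A typical run (a sketch; the MPI launcher name varies by installation):
!      mpirun -np 2 ex2f -m 120 -monitor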

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create vector data structures
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create distributed array (DA) to manage parallel grid and vectors.
!  Set up the ghost point communication pattern.  There are m total
!  grid values spread equally among all the processors.
      i1 = 1
      call DACreate1d(PETSC_COMM_WORLD,DA_NONPERIODIC,m,i1,i1,                &
     &     PETSC_NULL_INTEGER,da,ierr)
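!  (Here the two i1 arguments are, respectively, the number of degrees
!  of freedom per node and the stencil width, i.e., one ghost point on
!  each side; PETSC_NULL_INTEGER lets PETSc choose the local sizes.)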

!  Extract global and local vectors from DA; then duplicate for remaining
!  vectors that are the same types.

      call DACreateGlobalVector(da,u,ierr)
      call DACreateLocalVector(da,u_local,ierr)

!  Make local work vector for evaluating right-hand-side function
      call VecDuplicate(u_local,localwork,ierr)

!  Make global work vector for storing exact solution
      call VecDuplicate(u,solution,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create timestepping solver context; set callback routine for
!  right-hand-side function evaluation.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call TSCreate(comm,ts,ierr)
      call TSSetProblemType(ts,TS_NONLINEAR,ierr)
      call TSSetRHSFunction(ts,RHSFunction,PETSC_NULL_OBJECT,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Set optional user-defined monitoring routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      if (monitor .eq. 1) then
        call TSSetMonitor(ts,MyMonitor,PETSC_NULL_OBJECT,               &
     &                    PETSC_NULL_FUNCTION,ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  For nonlinear problems, the user can provide a Jacobian evaluation
!  routine (or use a finite differencing approximation).
!
!  Create matrix data structure; set Jacobian evaluation routine.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call MatCreate(comm,PETSC_DECIDE,PETSC_DECIDE,m,m,A,ierr)
      call MatSetFromOptions(A,ierr)
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-fdjac',flg,ierr)
      if (flg .eq. 1) then
         call SetCRoutineFromFortran(ts,A,A,ierr)
      else
         call TSSetRHSJacobian(ts,A,A,RHSJacobian,PETSC_NULL_OBJECT,    &
     &        ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Set solution vector and initial timestep
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      dt = h/two_d0
      call TSSetInitialTimeStep(ts,0.d0,dt,ierr)
      call TSSetSolution(ts,u,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize timestepping solver:
!     - Set the solution method to be the backward Euler method.
!     - Set timestepping duration info.
!  Then set runtime options, which can override these defaults.
!  For example,
!     -ts_max_steps <maxsteps> -ts_max_time <maxtime>
!  to override the defaults set by TSSetDuration().
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call TSSetType(ts,TS_BEULER,ierr)
      call TSSetDuration(ts,time_steps_max,time_total_max,ierr)
      call TSSetFromOptions(ts,ierr)
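!  (For example, running with -ts_max_steps 10 -ts_max_time 1.0 stops
!  the integration after 10 steps or at t = 1.0, whichever comes first.)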

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Solve the problem
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Evaluate initial conditions

      call InitialConditions(u)

!  Run the timestepping solver

      call TSStep(ts,steps,ftime,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call TSDestroy(ts,ierr)
      call VecDestroy(localwork,ierr)
      call VecDestroy(solution,ierr)
      call VecDestroy(u_local,ierr)
      call VecDestroy(u,ierr)
      call DADestroy(da,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_summary).

      call PetscFinalize(ierr)
      end

!  ------------------------------------------------------------------------
!
!  InitialConditions - Computes the solution at the initial time.
!
!  Input Parameter:
!     u - uninitialized solution vector (global)
!
!  Output Parameter:
!     u - vector with solution at initial time (global)
!
!  Note: This example passes problem data through the common blocks
!  declared in ex2f.h rather than through an application context.
!
      subroutine InitialConditions(u)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      Vec     u

!  Local variables:
      PetscScalar     localptr(1),x
      PetscInt        i,mybase,myend
      PetscErrorCode  ierr
      PetscOffset     idx

!  Determine starting and ending point of each processor's range of
!  grid values.  Note that we shift by 1 to convert from the 0-based
!  C convention of starting indices to the 1-based Fortran convention.

      call VecGetOwnershipRange(u,mybase,myend,ierr)
      mybase = mybase + 1

!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArray() returns a pointer to
!      the data array.  Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArray() when you no longer need access to
!      the array.
!    - Note that the Fortran interface to VecGetArray() differs from the
!      C version.  See the users manual for details.

      call VecGetArray(u,localptr,idx,ierr)

!     We initialize the solution array by simply writing the solution
!     directly into the array locations.  Alternatively, we could use
!     VecSetValues() or VecSetValuesLocal().

      do 10, i=mybase,myend
!       x - current location in global grid
        x = h*(i-1)
        localptr(i-mybase+idx+1) = one_d0 + x*x
 10   continue

!  Restore vector

      call VecRestoreArray(u,localptr,idx,ierr)

!  Print debugging information if desired
      if (debug .eq. 1) then
        if (rank .eq. 0) write(6,*) 'initial guess vector'
        call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

      return
      end

!  ------------------------------------------------------------------------
!
!  ExactSolution - Computes the exact solution at a given time.
!
!  Input Parameter:
!    t - current time
!
!  Output Parameter:
!    ierr - error code
!
!  Note: The solution vector is stored in the common block!
!
      subroutine ExactSolution(t,ierr)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      PetscReal       t
      PetscErrorCode  ierr

!  Local variables:
      PetscScalar  localptr(1),x
      PetscInt     i,mybase,myend
      PetscOffset  idx

!  Determine starting and ending point of each processor's range of
!  grid values.  Note that we shift by 1 to convert from the 0-based
!  C convention of starting indices to the 1-based Fortran convention.

      call VecGetOwnershipRange(solution,mybase,myend,ierr)
      mybase = mybase + 1

!  Get a pointer to vector data
      call VecGetArray(solution,localptr,idx,ierr)

!  Simply write the solution directly into the array locations.
!  Alternatively, we could use VecSetValues() or VecSetValuesLocal().

      do 10, i=mybase,myend
!       x - current location in global grid
        x = h*(i-1)
        localptr(i-mybase+idx+1) = (t + one_d0)*(one_d0 + x*x)
 10   continue

!  Restore vector

      call VecRestoreArray(solution,localptr,idx,ierr)

      return
      end

!  ------------------------------------------------------------------------
!
!   MyMonitor - A user-provided routine to monitor the solution computed at
!   each timestep.  This example plots the solution and computes the
!   error in two different norms.
!
!   Input Parameters:
!   ts     - the timestep context
!   step   - the count of the current step (with 0 meaning the
!            initial condition)
!   time   - the current time
!   u      - the solution at this timestep
!   dummy  - optional user-provided context for this monitoring
!            routine (not used here)
!
      subroutine MyMonitor(ts,step,time,u,dummy,ierr)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      TS              ts
      PetscInt        step
      PetscObject     dummy
      PetscReal       time
      Vec             u
      PetscErrorCode  ierr

!  Local variables:
      PetscScalar  en2,en2s,enmax
      PetscScalar  mone
      PetscDraw    draw

      mone = -1.0d0

!  We use the default X-windows viewer
!       PETSC_VIEWER_DRAW_WORLD
!  that is associated with the PETSC_COMM_WORLD communicator. This
!  saves the effort of calling PetscViewerDrawOpen() to create the window.
!  Note that if we wished to plot several items in separate windows we
!  would create each viewer with PetscViewerDrawOpen() and store them in
!  the application context, appctx.
!
!  Double buffering makes graphics look better.

      call PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_WORLD,0,draw,ierr)
      call PetscDrawSetDoubleBuffer(draw,ierr)
      call VecView(u,PETSC_VIEWER_DRAW_WORLD,ierr)

!  Compute the exact solution at this timestep.  Note that the
!  solution vector is passed via the user-defined common block.
      call ExactSolution(time,ierr)

!  Print debugging information if desired
      if (debug .eq. 1) then
        if (rank .eq. 0) write(6,*) 'Computed solution vector'
        call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
        if (rank .eq. 0) write(6,*) 'Exact solution vector'
        call VecView(solution,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

!  Compute the 2-norm and max-norm of the error.  Note that VecAXPY()
!  overwrites the exact solution with the error:  solution <- solution - u
      call VecAXPY(mone,u,solution,ierr)
      call VecNorm(solution,NORM_2,en2,ierr)
!  Scale the 2-norm by sqrt(h) so that it approximates the continuous
!  L2 norm of the error
      en2s = dsqrt(h)*en2
      call VecNorm(solution,NORM_MAX,enmax,ierr)

!  Print only from processor 0
      if (rank .eq. 0) write(6,100) step,time,en2s,enmax
 100  format('Timestep = ',i5,', time = ',f8.3,                         &
     &       ' sec, error [2-norm] = ',e9.3,                            &
     &       ', error [max-norm] = ',e9.3)

!  Print debugging information if desired
      if (debug .eq. 1) then
        if (rank .eq. 0) write(6,*) 'Error vector'
        call VecView(solution,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

      return
      end

!  ------------------------------------------------------------------------
!
!   RHSFunction - User-provided routine that evaluates the RHS function
!   in the ODE.  This routine is set in the main program by calling
!   TSSetRHSFunction().  We compute:
!         global_out = F(global_in)
!
!   Input Parameters:
!      ts         - timestep context
!      t          - current time
!      global_in  - input vector to function
!      dummy      - (optional) user-provided context for function evaluation
!                   (not used here because we use a common block instead)
!
!   Output Parameter:
!      global_out - value of function
!
      subroutine RHSFunction(ts,t,global_in,global_out,dummy)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      TS        ts
      PetscReal t
      Vec       global_in,global_out
      integer   dummy

!  Local variables:
      PetscScalar     localptr(1),copyptr(1),sc
      PetscErrorCode  ierr
      PetscInt        i,il,localsize
      PetscOffset     idx_c,idx_l

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Get ready for local function computations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Scatter ghost points to local vector, using the 2-step process
!     DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.

      call DAGlobalToLocalBegin(da,global_in,INSERT_VALUES,u_local,ierr)
      call DAGlobalToLocalEnd(da,global_in,INSERT_VALUES,u_local,ierr)

!  Access directly the values in our local INPUT work vector

      call VecGetArray(u_local,localptr,idx_l,ierr)

!  Access directly the values in our local OUTPUT work vector

      call VecGetArray(localwork,copyptr,idx_c,ierr)

      sc = one_d0/(h*h*two_d0*(one_d0+t)*(one_d0+t))
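!  (sc combines the 1/h^2 factor of the centered second difference
!  (u_{i+1} + u_{i-1} - 2*u_i)/h^2 with the 1/(2*(1+t)^2) factor from
!  the right-hand side of the PDE.)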

!  Evaluate our function on the nodes owned by this processor

      call VecGetLocalSize(u_local,localsize,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Compute entries for the locally owned part
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Handle boundary conditions: This is done by using the boundary condition
!        u(t,boundary) = g(t,boundary)
!  for some function g. Now take the derivative with respect to t to obtain
!        u_{t}(t,boundary) = g_{t}(t,boundary)
!
!  In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
!          and  u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2

       if (rank .eq. 0)      copyptr(1+idx_c) = one_d0
       if (rank .eq. size-1) copyptr(localsize+idx_c) = two_d0

!  Handle the interior nodes where the PDE is replaced by finite
!  difference operators.

      do 10, i=2,localsize-1
         il = i + idx_l
         copyptr(i+idx_c) = localptr(il) * sc *                         &
     &     (localptr(il+1) + localptr(il-1) - two_d0*localptr(il))
 10   continue

      call VecRestoreArray(u_local,localptr,idx_l,ierr)
      call VecRestoreArray(localwork,copyptr,idx_c,ierr)

!  Return the values from our local OUTPUT vector into our global
!  output vector.

      call DALocalToGlobal(da,localwork,INSERT_VALUES,global_out,ierr)

!  Print debugging information if desired
      if (debug .eq. 1) then
        if (rank .eq. 0) write(6,*) 'RHS function vector'
        call VecView(global_out,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

      return
      end

!  ------------------------------------------------------------------------
!
!  RHSJacobian - User-provided routine to compute the Jacobian of the
!                right-hand-side function.
!
!  Input Parameters:
!     ts - the TS context
!     t - current time
!     global_in - global input vector
!     dummy - optional user-defined context, as set by TSSetRHSJacobian()
!
!  Output Parameters:
!     A - Jacobian matrix
!     B - optionally different preconditioning matrix
!     str - flag indicating matrix structure
!
!  Notes:
!  RHSJacobian computes entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.  The loop below
!     runs only over the global rows owned by this processor.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Here, we set all entries for a particular row at once.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

      subroutine RHSJacobian(ts,t,global_in,A,B,str,dummy)
      implicit none
#include "ex2f.h"

!  Input/output parameters:
      TS            ts
      PetscReal     t
      Vec           global_in
      Mat           A,B
      MatStructure  str
      integer       dummy

!  Local variables:
      PetscScalar     localptr(1),sc,v(3)
      PetscInt        i,col(3),is,i1,i3
      PetscErrorCode  ierr
      PetscInt        mstart,mend,mstarts,mends
      PetscOffset     idx

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Get ready for local Jacobian computations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Scatter ghost points to local vector, using the 2-step process
!     DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.

      call DAGlobalToLocalBegin(da,global_in,INSERT_VALUES,u_local,ierr)
      call DAGlobalToLocalEnd(da,global_in,INSERT_VALUES,u_local,ierr)

!  Get pointer to vector data

      call VecGetArray(u_local,localptr,idx,ierr)

!  Get starting and ending locally owned rows of the matrix

      call MatGetOwnershipRange(A,mstarts,mends,ierr)
      mstart = mstarts
      mend   = mends

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries using either MatSetValuesLocal()
!     or MatSetValues().
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Set matrix rows corresponding to boundary data
      i1 = 1
      i3 = 3
      if (mstart .eq. 0) then
        v(1) = zero_d0
        call MatSetValues(A,i1,mstart,i1,mstart,v,INSERT_VALUES,ierr)
        mstart = mstart + 1
      endif
      if (mend .eq. m) then
        mend = mend - 1
        v(1) = zero_d0
        call MatSetValues(A,i1,mend,i1,mend,v,INSERT_VALUES,ierr)
      endif

!  Set matrix rows corresponding to interior data.
!  We construct the matrix one row at a time.

      sc = one_d0/(h*h*two_d0*(one_d0+t)*(one_d0+t))
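!  For each interior row i, F_i = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i),
!  so the three nonzero Jacobian entries in that row are
!      dF_i/du_{i-1} = sc*u_i
!      dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
!      dF_i/du_{i+1} = sc*u_i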
      do 10, i=mstart,mend-1
         col(1) = i-1
         col(2) = i
         col(3) = i+1
         is     = i - mstart + 1 + idx + 1
         v(1)   = sc*localptr(is)
         v(2)   = sc*(localptr(is+1) + localptr(is-1) -                 &
     &                four_d0*localptr(is))
         v(3)   = sc*localptr(is)
         call MatSetValues(A,i1,i,i3,col,v,INSERT_VALUES,ierr)
 10   continue

!  Restore vector

      call VecRestoreArray(u_local,localptr,idx,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Complete the matrix assembly process and set some options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Set flag to indicate that the Jacobian matrix retains an identical
!  nonzero structure throughout all timestepping iterations (although the
!  values of the entries change). Thus, we can save some work in setting
!  up the preconditioner (e.g., no need to redo symbolic factorization for
!  ILU/ICC preconditioners).
!   - If the nonzero structure of the matrix is different during
!     successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
!     must be used instead.  If you are unsure whether the matrix
!     structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
!   - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
!     believes your assertion and does not check the structure
!     of the matrix.  If you erroneously claim that the structure
!     is the same when it actually is not, the new preconditioner
!     will not function correctly.  Thus, use this optimization
!     feature with caution!

      str = SAME_NONZERO_PATTERN

!  Set an option to indicate that we will never add a new nonzero location
!  to the matrix.  If we do, it will generate an error.

      call MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,ierr)

      return
      end