Actual source code: ex5f.F

! "$Id: ex5f.F,v 1.60 2001/03/22 20:32:01 bsmith Exp $";
!
!  Program usage:  mpirun -np <procs> ex5f [-help] [all PETSc options]
!
!  Description: This example solves a nonlinear system in parallel with SNES.
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DAs) to partition the parallel grid.
!  The command line options include:
!    -par <param>, where <param> = Bratu parameter for the SFI problem
!       (0 <= par <= 6.81), which controls the nonlinearity of the problem
!    -mx <xg>, where <xg> = number of grid points in the x-direction
!    -my <yg>, where <yg> = number of grid points in the y-direction
!    -Nx <npx>, where <npx> = number of processors in the x-direction
!    -Ny <npy>, where <npy> = number of processors in the y-direction
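!
!  Example (a sample invocation using only the options above; any other
!  PETSc runtime options may be appended):
!    mpirun -np 4 ex5f -mx 16 -my 16 -Nx 2 -Ny 2 -par 6.0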
!
!/*T
!  Concepts: SNES^parallel Bratu example
!  Concepts: DA^using distributed arrays;
!  Processors: n
!T*/
!
!  --------------------------------------------------------------------------
!
!  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!  the partial differential equation
!
!          -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
!
!  with boundary conditions
!
!           u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!  A finite difference approximation with the usual 5-point stencil
!  is used to discretize the boundary value problem to obtain a nonlinear
!  system of equations.
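!
!  At an interior node (i,j), with hx = 1/(mx-1) and hy = 1/(my-1), the
!  discrete equations implemented in ApplicationFunction() below are
!
!     (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
!       + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
!       - hx*hy*lambda*exp(u(i,j)) = 0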
 36: !
 37: !  The uniprocessor version of this code is snes/examples/tutorials/ex4f.F
 38: !
 39: !  --------------------------------------------------------------------------

      program main
      implicit none
!
!  We place common blocks, variable declarations, and other include files
!  needed for this code in the single file ex5f.h.  We then need to include
!  only this file throughout the various routines in this program.  See
!  additional comments in the file ex5f.h.
!
#include "ex5f.h"
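
!  A rough sketch (not the actual file) of the kinds of declarations that
!  ex5f.h must supply, inferred from the variables referenced below; the
!  header names and the use of common blocks are assumptions:
!
!     #include "include/finclude/petsc.h"   (plus vec/mat/da/snes headers)
!     DA               da
!     Vec              localX,localF
!     integer          size,rank,mx,my
!     integer          xs,xe,xm,ys,ye,ym
!     integer          gxs,gxe,gxm,gys,gye,gym
!     double precision lambda
!     (shared between the routines via common blocks)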

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     snes        - nonlinear solver
!     x, r        - solution, residual vectors
!     J           - Jacobian matrix
!     its         - iterations for convergence
!     Nx, Ny      - number of processors in x- and y-directions
!     matrix_free - flag (1 indicates the matrix-free version)
!
!  See additional variable declarations in the file ex5f.h
!
      SNES                   snes
      Vec                    x,r
      Mat                    J
      ISLocalToGlobalMapping isltog
      integer                its,Nx,Ny,matrix_free,flg,N,ierr,m
      double precision       lambda_max,lambda_min

!  Note: Any user-defined Fortran routines (such as FormJacobian)
!  MUST be declared as external.

      external FormFunction,FormInitialGuess,FormJacobian

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Initialize program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)

!  Initialize problem parameters

      lambda_max = 6.81
      lambda_min = 0.0
      lambda     = 6.0
      mx         = 4
      my         = 4
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-mx',mx,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-my',my,flg,ierr)
      call PetscOptionsGetDouble(PETSC_NULL_CHARACTER,'-par',lambda,    &
     &                           flg,ierr)
      if (lambda .ge. lambda_max .or. lambda .le. lambda_min) then
         if (rank .eq. 0) write(6,*) 'Lambda is out of range'
         SETERRQ(1,' ',ierr)
      endif
      N = mx*my

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create nonlinear solver context
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call SNESCreate(PETSC_COMM_WORLD,SNES_NONLINEAR_EQUATIONS,        &
     &                snes,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create vector data structures; set function evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create distributed array (DA) to manage parallel grid and vectors

      Nx = PETSC_DECIDE
      Ny = PETSC_DECIDE
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-Nx',Nx,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-Ny',Ny,flg,ierr)
      if (Nx*Ny .ne. size .and.                                         &
     &      (Nx .ne. PETSC_DECIDE .or. Ny .ne. PETSC_DECIDE)) then
         if (rank .eq. 0) then
            write(6,*) 'Incompatible number of procs: Nx * Ny != size'
         endif
         SETERRQ(1,' ',ierr)
      endif

!  This really needs only the star-type stencil, but we use the box
!  stencil temporarily.

      call DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_BOX,   &
     &     mx,my,Nx,Ny,1,1,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,       &
     &     da,ierr)

!
!  Visualize the distribution of the array across the processors
!
!     call DAView(da,PETSC_VIEWER_DRAW_WORLD,ierr)

!  Extract global and local vectors from DA; then duplicate for remaining
!  vectors of the same types

      call DACreateGlobalVector(da,x,ierr)
      call DACreateLocalVector(da,localX,ierr)
      call VecDuplicate(x,r,ierr)
      call VecDuplicate(localX,localF,ierr)

!  Get local grid boundaries (for 2-dimensional DA)

      call DAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym,              &
     &     PETSC_NULL_INTEGER,ierr)
      call DAGetGhostCorners(da,gxs,gys,PETSC_NULL_INTEGER,gxm,gym,     &
     &     PETSC_NULL_INTEGER,ierr)
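
!  (xs,ys are the 0-based global indices of the first locally owned node
!  and xm,ym the local extents; the gxs,gys,gxm,gym variants describe
!  the local patch including ghost points.)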

!  Here we shift the starting indices up by one so that we can easily
!  use the Fortran convention of 1-based indices (rather than 0-based
!  indices).

      xs  = xs+1
      ys  = ys+1
      gxs = gxs+1
      gys = gys+1

      ye  = ys+ym-1
      xe  = xs+xm-1
      gye = gys+gym-1
      gxe = gxs+gxm-1
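
!  For example (a sketch): a process whose patch starts at global node
!  (0,0) with xm = 3 and ym = 2 ends up with xs = 1, xe = 3, ys = 1,
!  ye = 2, so loops can run "do j=ys,ye" / "do i=xs,xe" in 1-based style.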

!  Set function evaluation routine and vector

      call SNESSetFunction(snes,r,FormFunction,PETSC_NULL_OBJECT,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create matrix data structure; set Jacobian evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Set Jacobian matrix data structure and default Jacobian evaluation
!  routine. User can override with:
!     -snes_fd : default finite differencing approximation of Jacobian
!     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
!                (unless user explicitly sets preconditioner)
!     -snes_mf_operator : form preconditioning matrix as set by the user,
!                         but use matrix-free approx for Jacobian-vector
!                         products within Newton-Krylov method
!
!  Note:  For the parallel case, vectors and matrices MUST be partitioned
!     accordingly.  When using distributed arrays (DAs) to create vectors,
!     the DAs determine the problem partitioning.  We must explicitly
!     specify the local matrix dimensions upon its creation for compatibility
!     with the vector distribution.  Thus, the generic MatCreate() routine
!     is NOT sufficient when working with distributed arrays.
!
!     Note: Here we only approximately preallocate storage space for the
!     Jacobian.  See the users manual for a discussion of better techniques
!     for preallocating matrix memory.
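!
!     A hedged reading of the preallocation figures used below: 5 is the
!     natural per-row upper bound for a 5-point stencil in the diagonal
!     block, and 3 is a rough upper bound on off-process couplings for
!     rows on the boundary of the local patch.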

      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-snes_mf',         &
     &     matrix_free,ierr)
      if (matrix_free .eq. 0) then
         if (size .eq. 1) then
            call MatCreateSeqAIJ(PETSC_COMM_WORLD,N,N,5,                &
     &           PETSC_NULL_INTEGER,J,ierr)
         else
            call VecGetLocalSize(x,m,ierr)
            call MatCreateMPIAIJ(PETSC_COMM_WORLD,m,m,N,N,5,            &
     &           PETSC_NULL_INTEGER,3,PETSC_NULL_INTEGER,J,ierr)
         endif
         call SNESSetJacobian(snes,J,J,FormJacobian,PETSC_NULL_OBJECT,  &
     &        ierr)

!        Get the mapping from local-to-global node numbers for all local
!        nodes, including ghost points.  Associate this mapping with the
!        matrix for later use in setting matrix entries via
!        MatSetValuesLocal().

         call DAGetISLocalToGlobalMapping(da,isltog,ierr)
         call MatSetLocalToGlobalMapping(J,isltog,ierr)

      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize nonlinear solver; set runtime options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)

      call SNESSetFromOptions(snes,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Evaluate initial guess; then solve nonlinear system.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Note: The user should initialize the vector, x, with the initial guess
!  for the nonlinear solver prior to calling SNESSolve().  In particular,
!  to employ an initial guess of zero, the user should explicitly set
!  this vector to zero by calling VecSet().
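!  For instance (a sketch, assuming a Scalar variable named zero; in
!  this generation of PETSc the Fortran binding takes the value first):
!     zero = 0.0
!     call VecSet(zero,x,ierr)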

      call FormInitialGuess(x,ierr)
      call SNESSolve(snes,x,its,ierr)
      if (rank .eq. 0) then
         write(6,100) its
      endif
  100 format('Number of Newton iterations = ',i5)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      if (matrix_free .eq. 0) call MatDestroy(J,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(r,ierr)
      call VecDestroy(localX,ierr)
      call VecDestroy(localF,ierr)
      call SNESDestroy(snes,ierr)
      call DADestroy(da,ierr)
      call PetscFinalize(ierr)
      end

! ---------------------------------------------------------------------
!
!  FormInitialGuess - Forms initial approximation.
!
!  Input Parameter:
!  X - vector
!
!  Output Parameter:
!  X - vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "ApplicationInitialGuess", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArray() and VecRestoreArray().
!
      subroutine FormInitialGuess(X,ierr)
      implicit none

#include "ex5f.h"

!  Input/output variables:
      Vec      X
      integer  ierr

!  Declarations for use with local arrays:
      Scalar      lx_v(0:1)
      PetscOffset lx_i

      ierr = 0

!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArray() returns a pointer to
!      the data array.  Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArray() when you no longer need access to
!      the array.
!    - Note that the Fortran interface to VecGetArray() differs from the
!      C version.  See the users manual for details.

      call VecGetArray(localX,lx_v,lx_i,ierr)
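
!  (Because lx_v is declared with a 0-based lower bound, the local data
!  begins at lx_v(lx_i); that is the address handed to the computational
!  routine below.)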

!  Compute initial guess over the locally owned part of the grid

      call ApplicationInitialGuess(lx_v(lx_i),ierr)

!  Restore vector

      call VecRestoreArray(localX,lx_v,lx_i,ierr)

!  Insert values into global vector

      call DALocalToGlobal(da,localX,INSERT_VALUES,X,ierr)

      return
      end

! ---------------------------------------------------------------------
!
!  ApplicationInitialGuess - Computes initial approximation, called by
!  the higher level routine FormInitialGuess().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  x - local vector data
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine ApplicationInitialGuess(x,ierr)
      implicit none

#include "ex5f.h"

!  Input/output variables:
      Scalar  x(gxs:gxe,gys:gye)
      integer ierr

!  Local variables:
      integer  i,j
      Scalar   temp1,temp,hx,hy,sc,one,hxdhy,hydhx

!  Set parameters

      ierr   = 0
      one    = 1.0
      hx     = one/(dble(mx-1))
      hy     = one/(dble(my-1))
      sc     = hx*hy*lambda
      hxdhy  = hx/hy
      hydhx  = hy/hx
      temp1  = lambda/(lambda + one)
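
!  Tent-shaped initial guess: zero on the boundary and growing with the
!  distance to the nearest edge (the sqrt(min(...)) expression below).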

      do 20 j=ys,ye
         temp = dble(min(j-1,my-j))*hy
         do 10 i=xs,xe
            if (i .eq. 1 .or. j .eq. 1                                  &
     &             .or. i .eq. mx .or. j .eq. my) then
              x(i,j) = 0.0
            else
              x(i,j) = temp1 *                                          &
     &          sqrt(min(dble(min(i-1,mx-i))*hx,dble(temp)))
            endif
 10      continue
 20   continue

      return
      end

! ---------------------------------------------------------------------
!
!  FormFunction - Evaluates nonlinear function, F(x).
!
!  Input Parameters:
!  snes - the SNES context
!  X - input vector
!  dummy - optional user-defined context, as set by SNESSetFunction()
!          (not used here)
!
!  Output Parameter:
!  F - function vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "ApplicationFunction", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArray() and VecRestoreArray().
!
      subroutine FormFunction(snes,X,F,dummy,ierr)
      implicit none

#include "ex5f.h"

!  Input/output variables:
      SNES     snes
      Vec      X,F
      integer  dummy
      integer  ierr

!  Declarations for use with local arrays:
      Scalar      lx_v(0:1),lf_v(0:1)
      PetscOffset lx_i,lf_i

!  Scatter ghost points to local vector, using the 2-step process
!     DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
!  By placing code between these two statements, computations can
!  be done while messages are in transit.

      call DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr)
      call DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)

!  Get pointers to vector data.
!    - For default PETSc vectors, VecGetArray() returns a pointer to
!      the data array.  Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArray() when you no longer need access to
!      the array.
!    - Note that the Fortran interface to VecGetArray() differs from the
!      C version.  See the Fortran chapter of the users manual for details.

      call VecGetArray(localX,lx_v,lx_i,ierr)
      call VecGetArray(localF,lf_v,lf_i,ierr)

!  Compute function over the locally owned part of the grid

      call ApplicationFunction(lx_v(lx_i),lf_v(lf_i),ierr)

!  Restore vectors

      call VecRestoreArray(localX,lx_v,lx_i,ierr)
      call VecRestoreArray(localF,lf_v,lf_i,ierr)

!  Insert values into global vector

      call DALocalToGlobal(da,localF,INSERT_VALUES,F,ierr)
      call PetscLogFlops(11*ym*xm,ierr)
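
!  (A reading of the flop count: 4 flops for uxx, 4 for uyy, and 3 to
!  combine them with the exponential term give 11 per interior point;
!  the exp() evaluation itself is not counted.)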

!      call VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)
!      call VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)

      return
      end

! ---------------------------------------------------------------------
!
!  ApplicationFunction - Computes nonlinear function, called by
!  the higher level routine FormFunction().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  f - local vector data, f(x)
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine ApplicationFunction(x,f,ierr)
      implicit none

#include "ex5f.h"

!  Input/output variables:
      Scalar   x(gxs:gxe,gys:gye),f(gxs:gxe,gys:gye)
      integer  ierr

!  Local variables:
      Scalar   two,one,hx,hy,hxdhy,hydhx,sc
      Scalar   u,uxx,uyy
      integer  i,j

      ierr   = 0
      one    = 1.0
      two    = 2.0
      hx     = one/dble(mx-1)
      hy     = one/dble(my-1)
      sc     = hx*hy*lambda
      hxdhy  = hx/hy
      hydhx  = hy/hx

!  Compute function over the locally owned part of the grid

      do 20 j=ys,ye
         do 10 i=xs,xe
            if (i .eq. 1 .or. j .eq. 1                                  &
     &             .or. i .eq. mx .or. j .eq. my) then
               f(i,j) = x(i,j)
            else
               u = x(i,j)
               uxx = hydhx * (two*u                                     &
     &                - x(i-1,j) - x(i+1,j))
               uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
               f(i,j) = uxx + uyy - sc*exp(u)
            endif
 10      continue
 20   continue

      return
      end

! ---------------------------------------------------------------------
!
!  FormJacobian - Evaluates Jacobian matrix.
!
!  Input Parameters:
!  snes     - the SNES context
!  x        - input vector
!  dummy    - optional user-defined context, as set by SNESSetJacobian()
!             (not used here)
!
!  Output Parameters:
!  jac      - Jacobian matrix
!  jac_prec - optionally different preconditioning matrix (not used here)
!  flag     - flag indicating matrix structure
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "ApplicationJacobian", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArray() and VecRestoreArray().
!
!  Notes:
!  Due to grid point reordering with DAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the local-to-global mapping.  We
!  cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Do the following two steps once, before calling SNESSolve():
!          - Use DAGetISLocalToGlobalMapping() to extract the
!            local-to-global map from the DA
!          - Associate this map with the matrix by calling
!            MatSetLocalToGlobalMapping()
!        - Then set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Use DAGetGlobalIndices() to extract the local-to-global map
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
      subroutine FormJacobian(snes,X,jac,jac_prec,flag,dummy,ierr)
      implicit none

#include "ex5f.h"

!  Input/output variables:
      SNES         snes
      Vec          X
      Mat          jac,jac_prec
      MatStructure flag
      integer      dummy
      integer      ierr

!  Declarations for use with local arrays:
      Scalar      lx_v(0:1)
      PetscOffset lx_i

!  Scatter ghost points to local vector, using the 2-step process
!     DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
!  Computations can be done while messages are in transit,
!  by placing code between these two statements.

      call DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr)
      call DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)

!  Get a pointer to vector data

      call VecGetArray(localX,lx_v,lx_i,ierr)

!  Compute entries for the locally owned part of the Jacobian.

      call ApplicationJacobian(lx_v(lx_i),jac,jac_prec,ierr)

!  Assemble matrix, using the 2-step process:
!     MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit,
!  by placing code between these two statements.

      call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
      call VecRestoreArray(localX,lx_v,lx_i,ierr)
      call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)

!  Set flag to indicate that the Jacobian matrix retains an identical
!  nonzero structure throughout all nonlinear iterations (although the
!  values of the entries change).  Thus, we can save some work in setting
!  up the preconditioner (e.g., no need to redo symbolic factorization for
!  ILU/ICC preconditioners).
!   - If the nonzero structure of the matrix is different during
!     successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
!     must be used instead.  If you are unsure whether the matrix
!     structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
!   - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
!     believes your assertion and does not check the structure
!     of the matrix.  If you erroneously claim that the structure
!     is the same when it actually is not, the new preconditioner
!     will not function correctly.  Thus, use this optimization
!     feature with caution!

      flag = SAME_NONZERO_PATTERN

!  Tell the matrix that we will never add a new nonzero location to it;
!  if we do, PETSc will generate an error.

      call MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,ierr)

      return
      end

! ---------------------------------------------------------------------
!
!  ApplicationJacobian - Computes Jacobian matrix, called by
!  the higher level routine FormJacobian().
!
!  Input Parameter:
!  x        - local vector data
!
!  Output Parameters:
!  jac      - Jacobian matrix
!  jac_prec - optionally different preconditioning matrix (not used here)
!  ierr     - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
!  Notes:
!  Due to grid point reordering with DAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the local-to-global ("ltog") mapping.  We
!  cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Use DAGetISLocalToGlobalMapping() to extract the
!          local-to-global map from the DA
!        - Associate this map with the matrix by calling
!          MatSetLocalToGlobalMapping() once
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Use DAGetGlobalIndices() to extract the local-to-global map
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
      subroutine ApplicationJacobian(x,jac,jac_prec,ierr)
      implicit none

#include "ex5f.h"

!  Input/output variables:
      Scalar   x(gxs:gxe,gys:gye)
      Mat      jac,jac_prec
      integer  ierr

!  Local variables:
      integer  row,col(5),i,j
      Scalar   two,one,hx,hy,hxdhy,hydhx,sc,v(5)

!  Set parameters

      ierr   = 0
      one    = 1.0
      two    = 2.0
      hx     = one/dble(mx-1)
      hy     = one/dble(my-1)
      sc     = hx*hy
      hxdhy  = hx/hy
      hydhx  = hy/hx

!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries using either MatSetValuesLocal()
!     or MatSetValues(), as discussed above.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

      do 20 j=ys,ye
         row = (j - gys)*gxm + xs - gxs - 1
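!        (row is the 0-based position of node (i,j) in the local ghosted
!        ordering; after the increment below, row = (j-gys)*gxm + (i-gxs).)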
         do 10 i=xs,xe
            row = row + 1
!           boundary points
            if (i .eq. 1 .or. j .eq. 1                                  &
     &             .or. i .eq. mx .or. j .eq. my) then
               call MatSetValuesLocal(jac,1,row,1,row,one,              &
     &                                INSERT_VALUES,ierr)
!           interior grid points
            else
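!              5-point stencil: v(1)/col(1) couple to node (i,j-1),
!              v(2)/col(2) to (i-1,j), v(3)/col(3) is the diagonal,
!              v(4)/col(4) couples to (i+1,j), and v(5)/col(5) to (i,j+1)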
               v(1) = -hxdhy
               v(2) = -hydhx
               v(3) = two*(hydhx + hxdhy)                               &
     &                  - sc*lambda*exp(x(i,j))
               v(4) = -hydhx
               v(5) = -hxdhy
               col(1) = row - gxm
               col(2) = row - 1
               col(3) = row
               col(4) = row + 1
               col(5) = row + gxm
               call MatSetValuesLocal(jac,1,row,5,col,v,                &
     &                                INSERT_VALUES,ierr)
            endif
 10      continue
 20   continue

      return
      end