Actual source code: ex5f90.F

  1: !
  2: !  Description: Solves a nonlinear system in parallel with SNES.
  3: !  We solve the  Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
  4: !  domain, using distributed arrays (DAs) to partition the parallel grid.
  5: !  The command line options include:
  6: !    -par <parameter>, where <parameter> indicates the nonlinearity of the problem
  7: !       problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
  8: !
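!  Example invocation (illustrative only; the MPI launcher name and the
!  process count are assumptions, while -par and -snes_monitor are options
!  described in this file):
!
!      mpiexec -n 4 ./ex5f90 -par 6.0 -snes_monitor
!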
  9: !/*T
 10: !  Concepts: SNES^parallel Bratu example
 11: !  Concepts: DA^using distributed arrays;
 12: !  Processors: n
 13: !T*/
 14: !
 15: !  --------------------------------------------------------------------------
 16: !
 17: !  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
 18: !  the partial differential equation
 19: !
 20: !          -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
 21: !
 22: !  with boundary conditions
 23: !
 24: !           u = 0  for  x = 0, x = 1, y = 0, y = 1.
 25: !
 26: !  A finite difference approximation with the usual 5-point stencil
 27: !  is used to discretize the boundary value problem to obtain a nonlinear
 28: !  system of equations.
 29: !
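!  For reference, the 5-point discretization evaluated in
!  FormFunctionLocal() below (with hx = 1/(mx-1), hy = 1/(my-1)) gives,
!  at each interior grid point (i,j), the residual
!
!     F(i,j) = (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
!            + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
!            - hx*hy*lambda*exp(u(i,j))
!
!  while F(i,j) = u(i,j) at boundary points.
!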
 30: !  The uniprocessor version of this code is snes/examples/tutorials/ex4f.F
 31: !
 32: !  --------------------------------------------------------------------------
 33: !  The following define must be used before including any PETSc include files
 34: !  in a module or interface, because the declarations in those files are not
 35: !  allowed in that context.
 36: !

 38:       module f90module
 39:       type userctx
 40: #define PETSC_AVOID_DECLARATIONS
 41: #include "include/finclude/petsc.h"
 42: #include "include/finclude/petscvec.h"
 43: #include "include/finclude/petscda.h"
 44: #undef PETSC_AVOID_DECLARATIONS
 45:         DA      da
 46:         PetscInt xs,xe,xm,gxs,gxe,gxm
 47:         PetscInt ys,ye,ym,gys,gye,gym
 48:         PetscInt mx,my
 49:         PetscMPIInt rank
 50:         double precision lambda
 51:       end type userctx

 53:       contains
 54: ! ---------------------------------------------------------------------
 55: !
 56: !  FormFunction - Evaluates nonlinear function, F(x).
 57: !
 58: !  Input Parameters:
 59: !  snes - the SNES context
 60: !  X - input vector
 61: !  dummy - optional user-defined context, as set by SNESSetFunction()
 62: !          (not used here)
 63: !
 64: !  Output Parameter:
 65: !  F - function vector
 66: !
 67: !  Notes:
 68: !  This routine serves as a wrapper for the lower-level routine
 69: !  "FormFunctionLocal", where the actual computations are
 70: !  done using the standard Fortran style of treating the local
 71: !  vector data as a multidimensional array over the local mesh.
 72: !  This routine merely handles ghost point scatters and accesses
 73: !  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
 74: !
 75:       subroutine FormFunction(snes,X,F,user,ierr)
 76:       implicit none

 78: #include "include/finclude/petsc.h"
 79: #include "include/finclude/petscvec.h"
 80: #include "include/finclude/petscda.h"
 81: #include "include/finclude/petscis.h"
 82: #include "include/finclude/petscmat.h"
 83: #include "include/finclude/petscksp.h"
 84: #include "include/finclude/petscpc.h"
 85: #include "include/finclude/petscsnes.h"
 86: #include "include/finclude/petscvec.h90"

 88: !  Input/output variables:
 89:       SNES           snes
 90:       Vec            X,F
 91:       PetscErrorCode ierr
 92:       type (userctx) user

 94: !  Declarations for use with local arrays:
 95:       PetscScalar,pointer :: lx_v(:),lf_v(:)
 96:       Vec            localX

 98: !  Scatter ghost points to local vector, using the 2-step process
 99: !     DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
100: !  By placing code between these two statements, computations can
101: !  be done while messages are in transition.
102:       call DAGetLocalVector(user%da,localX,ierr)
103:       call DAGlobalToLocalBegin(user%da,X,INSERT_VALUES,                &
104:      &     localX,ierr)
105:       call DAGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr)

107: !  Get a pointer to vector data.
108: !    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
109: !      the data array. Otherwise, the routine is implementation dependent.
110: !    - You MUST call VecRestoreArrayF90() when you no longer need access to
111: !      the array.
112: !    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
113: !      and is usable only from Fortran 90.

115:       call VecGetArrayF90(localX,lx_v,ierr)
116:       call VecGetArrayF90(F,lf_v,ierr)

118: !  Compute function over the locally owned part of the grid
119:       call FormFunctionLocal(lx_v,lf_v,user,ierr)

121: !  Restore vectors
122:       call VecRestoreArrayF90(localX,lx_v,ierr)
123:       call VecRestoreArrayF90(F,lf_v,ierr)

125: !  Return local work vector to the DA and log flops

127:       call DARestoreLocalVector(user%da,localX,ierr)
128:       call PetscLogFlops(11*user%ym*user%xm,ierr)

130: !      call VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)
131: !      call VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)
132:       return
133:       end subroutine formfunction
134:       end module f90module

136:       module f90moduleinterfaces
137:         use f90module
138: 
139:       Interface SNESSetApplicationContext
140:         Subroutine SNESSetApplicationContext(snes,ctx,ierr)
141:         use f90module
142:           SNES snes
143:           type(userctx) ctx
144:           PetscErrorCode ierr
145:         End Subroutine
146:       End Interface SNESSetApplicationContext

148:       Interface SNESGetApplicationContext
149:         Subroutine SNESGetApplicationContext(snes,ctx,ierr)
150:         use f90module
151:           SNES snes
152:           type(userctx), pointer :: ctx
153:           PetscErrorCode ierr
154:         End Subroutine
155:       End Interface SNESGetApplicationContext
156:       end module f90moduleinterfaces

158:       program main
159:       use f90module
160:       use f90moduleinterfaces
161:       implicit none
162: !
163: #include "include/finclude/petsc.h"
164: #include "include/finclude/petscvec.h"
165: #include "include/finclude/petscda.h"
166: #include "include/finclude/petscis.h"
167: #include "include/finclude/petscmat.h"
168: #include "include/finclude/petscksp.h"
169: #include "include/finclude/petscpc.h"
170: #include "include/finclude/petscsnes.h"
171: #include "include/finclude/petscvec.h90"
172: #include "include/finclude/petscda.h90"

174: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
175: !                   Variable declarations
176: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
177: !
178: !  Variables:
179: !     snes        - nonlinear solver
180: !     x, r        - solution, residual vectors
181: !     J           - Jacobian matrix
182: !     its         - iterations for convergence
183: !     Nx, Ny      - number of processors in x- and y-directions
184: !     matrix_free - flag indicating use of the matrix-free version
185: !
186:       SNES             snes
187:       Vec              x,r
188:       Mat              J
189:       PetscErrorCode   ierr
190:       PetscInt         its
191:       PetscTruth       flg,matrix_free
192:       PetscInt         ione,nfour
193:       double precision lambda_max,lambda_min
194:       type (userctx)   user
195:       type(userctx), pointer:: puser

197: !  Note: Any user-defined Fortran routines (such as FormJacobian)
198: !  MUST be declared as external.
199:       external FormInitialGuess,FormJacobian

201: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
202: !  Initialize program
203: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
204:       call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
205:       call MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr)

207: !  Initialize problem parameters
208:       lambda_max  = 6.81
209:       lambda_min  = 0.0
210:       user%lambda = 6.0
211:       ione = 1
212:       nfour = -4
213:       call PetscOptionsGetReal(PETSC_NULL_CHARACTER,'-par',             &
214:      &     user%lambda,flg,ierr)
215:       if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) &
216:      &     then
217:          if (user%rank .eq. 0) write(6,*) 'Lambda is out of range'
218:          SETERRQ(1,' ',ierr)
219:       endif

221: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
222: !  Create nonlinear solver context
223: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
224:       call SNESCreate(PETSC_COMM_WORLD,snes,ierr)

226: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
227: !  Create vector data structures; set function evaluation routine
228: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

230: !  Create distributed array (DA) to manage parallel grid and vectors

232: ! This really needs only the star-type stencil, but we use the box
233: ! stencil temporarily.
234:       call DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_BOX,   &
235:      &     nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione,             &
236:      &     PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,user%da,ierr)
237:       call DAGetInfo(user%da,PETSC_NULL_INTEGER,user%mx,user%my,        &
238:      &               PETSC_NULL_INTEGER,                                &
239:      &               PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,             &
240:      &               PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,             &
241:      &               PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,             &
242:      &               PETSC_NULL_INTEGER,ierr)
243: 
244: !
245: !   Visualize the distribution of the array across the processors
246: !
247: !     call DAView(user%da,PETSC_VIEWER_DRAW_WORLD,ierr)

249: !  Extract global and local vectors from DA; then duplicate for remaining
250: !  vectors that are of the same type
251:       call DACreateGlobalVector(user%da,x,ierr)
252:       call VecDuplicate(x,r,ierr)

254: !  Get local grid boundaries (for 2-dimensional DA)
255:       call DAGetCorners(user%da,user%xs,user%ys,PETSC_NULL_INTEGER,     &
256:      &     user%xm,user%ym,PETSC_NULL_INTEGER,ierr)
257:       call DAGetGhostCorners(user%da,user%gxs,user%gys,                 &
258:      &     PETSC_NULL_INTEGER,user%gxm,user%gym,                        &
259:      &     PETSC_NULL_INTEGER,ierr)

261: !  Here we shift the starting indices up by one so that we can easily
262: !  use the Fortran convention of 1-based indices (rather than 0-based indices).
263:       user%xs  = user%xs+1
264:       user%ys  = user%ys+1
265:       user%gxs = user%gxs+1
266:       user%gys = user%gys+1

268:       user%ye  = user%ys+user%ym-1
269:       user%xe  = user%xs+user%xm-1
270:       user%gye = user%gys+user%gym-1
271:       user%gxe = user%gxs+user%gxm-1

273:       call SNESSetApplicationContext(snes,user,ierr)

275: !  Set function evaluation routine and vector
276:       call SNESSetFunction(snes,r,FormFunction,user,ierr)

278: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
279: !  Create matrix data structure; set Jacobian evaluation routine
280: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

282: !  Set Jacobian matrix data structure and default Jacobian evaluation
283: !  routine. User can override with:
284: !     -snes_fd : default finite differencing approximation of Jacobian
285: !     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
286: !                (unless user explicitly sets preconditioner)
287: !     -snes_mf_operator : form preconditioning matrix as set by the user,
288: !                         but use matrix-free approx for Jacobian-vector
289: !                         products within Newton-Krylov method
290: !
291: !  Note:  For the parallel case, vectors and matrices MUST be partitioned
292: !     accordingly.  When using distributed arrays (DAs) to create vectors,
293: !     the DAs determine the problem partitioning.  We must explicitly
294: !     specify the local matrix dimensions upon its creation for compatibility
295: !     with the vector distribution.  Thus, the generic MatCreate() routine
296: !     is NOT sufficient when working with distributed arrays.
297: !
298: !     Note: Here we only approximately preallocate storage space for the
299: !     Jacobian.  See the users manual for a discussion of better techniques
300: !     for preallocating matrix memory.
301: 
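!     As a rough sketch of explicit preallocation (not used in this
!     example, which relies on DAGetMatrix(); it assumes the
!     MatCreateMPIAIJ() calling sequence of this PETSc release, with
!     ifive a PetscInt set to 5 as a conservative bound on the nonzeros
!     per row in both the diagonal and off-diagonal blocks):
!
!      call MatCreateMPIAIJ(PETSC_COMM_WORLD,                           &
!     &     user%xm*user%ym,user%xm*user%ym,                            &
!     &     user%mx*user%my,user%mx*user%my,                            &
!     &     ifive,PETSC_NULL_INTEGER,ifive,PETSC_NULL_INTEGER,J,ierr)
!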
302:       call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-snes_mf',         &
303:      &     matrix_free,ierr)
304:       if (.not. matrix_free) then
305:         call DAGetMatrix(user%da,MATAIJ,J,ierr)
306:         call SNESSetJacobian(snes,J,J,FormJacobian,user,ierr)
307:       endif

309: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
310: !  Customize nonlinear solver; set runtime options
311: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
312: !  Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
313:       call SNESSetFromOptions(snes,ierr)

315: !     Test Fortran90 wrapper for SNESSet/Get ApplicationContext()
316:       call PetscOptionsGetTruth(PETSC_NULL_CHARACTER,'-test_appctx',             &
317:      &     flg,PETSC_NULL_CHARACTER,ierr)
318:       if (flg) then
319:         call SNESGetApplicationContext(snes,puser,ierr)
320:         if (user%da .ne. puser%da) then
321:           write(*,*) "Error: user != puser"
322:           write(*,*) "user: ", user
323:           write(*,*) "puser: ", puser
324:         endif
325:       endif

327: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
328: !  Evaluate initial guess; then solve nonlinear system.
329: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
330: !  Note: The user should initialize the vector, x, with the initial guess
331: !  for the nonlinear solver prior to calling SNESSolve().  In particular,
332: !  to employ an initial guess of zero, the user should explicitly set
333: !  this vector to zero by calling VecSet().
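!
!  For instance (a sketch; it assumes the VecSet() calling sequence of
!  this PETSc release, with zero a PetscScalar equal to 0.0):
!
!      call VecSet(x,zero,ierr)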

335:       call FormInitialGuess(snes,x,ierr)
336:       call SNESSolve(snes,PETSC_NULL_OBJECT,x,ierr)
337:       call SNESGetIterationNumber(snes,its,ierr)
338:       if (user%rank .eq. 0) then
339:          write(6,100) its
340:       endif
341:   100 format('Number of Newton iterations = ',i5)

343: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
344: !  Free work space.  All PETSc objects should be destroyed when they
345: !  are no longer needed.
346: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
347:       if (.not. matrix_free) call MatDestroy(J,ierr)
348:       call VecDestroy(x,ierr)
349:       call VecDestroy(r,ierr)
350:       call SNESDestroy(snes,ierr)
351:       call DADestroy(user%da,ierr)

353:       call PetscFinalize(ierr)
354:       end

356: ! ---------------------------------------------------------------------
357: !
358: !  FormInitialGuess - Forms initial approximation.
359: !
360: !  Input Parameters:
361: !  X - vector
362: !
363: !  Output Parameter:
364: !  X - vector
365: !
366: !  Notes:
367: !  This routine serves as a wrapper for the lower-level routine
368: !  "InitialGuessLocal", where the actual computations are
369: !  done using the standard Fortran style of treating the local
370: !  vector data as a multidimensional array over the local mesh.
371: !  This routine merely handles ghost point scatters and accesses
372: !  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
373: !
374:       subroutine FormInitialGuess(snes,X,ierr)
375:       use f90module
376:       use f90moduleinterfaces
377:       implicit none

379: #include "include/finclude/petscvec.h90"
380: #include "include/finclude/petsc.h"
381: #include "include/finclude/petscvec.h"
382: #include "include/finclude/petscda.h"
383: #include "include/finclude/petscis.h"
384: #include "include/finclude/petscmat.h"
385: #include "include/finclude/petscksp.h"
386: #include "include/finclude/petscpc.h"
387: #include "include/finclude/petscsnes.h"

389: !  Input/output variables:
390:       SNES           snes
391:       type(userctx), pointer:: puser
392:       Vec            X
393:       PetscErrorCode ierr
394: 
395: !  Declarations for use with local arrays:
396:       PetscScalar,pointer :: lx_v(:)
397:       Vec               localX

399:       ierr = 0
400:       call SNESGetApplicationContext(snes,puser,ierr)
401: !  Get a pointer to vector data.
402: !    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
403: !      the data array. Otherwise, the routine is implementation dependent.
404: !    - You MUST call VecRestoreArrayF90() when you no longer need access to
405: !      the array.
406: !    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
407: !      and is usable only from Fortran 90.

409:       call DAGetLocalVector(puser%da,localX,ierr)
410:       call VecGetArrayF90(localX,lx_v,ierr)

412: !  Compute initial guess over the locally owned part of the grid
413:       call InitialGuessLocal(puser,lx_v,ierr)

415: !  Restore vector
416:       call VecRestoreArrayF90(localX,lx_v,ierr)

418: !  Insert values into global vector
419:       call DALocalToGlobal(puser%da,localX,INSERT_VALUES,X,ierr)
420:       call DARestoreLocalVector(puser%da,localX,ierr)

422:       return
423:       end

425: ! ---------------------------------------------------------------------
426: !
427: !  InitialGuessLocal - Computes initial approximation, called by
428: !  the higher level routine FormInitialGuess().
429: !
430: !  Input Parameter:
431: !  x - local vector data
432: !
433: !  Output Parameters:
434: !  x - local vector data
435: !  ierr - error code
436: !
437: !  Notes:
438: !  This routine uses standard Fortran-style computations over a 2-dim array.
439: !
440:       subroutine InitialGuessLocal(user,x,ierr)
441:       use f90module
442:       implicit none

444: #include "include/finclude/petsc.h"
445: #include "include/finclude/petscvec.h"
446: #include "include/finclude/petscda.h"
447: #include "include/finclude/petscis.h"
448: #include "include/finclude/petscmat.h"
449: #include "include/finclude/petscksp.h"
450: #include "include/finclude/petscpc.h"
451: #include "include/finclude/petscsnes.h"

453: !  Input/output variables:
454:       type (userctx)         user
455:       PetscScalar  x(user%gxs:user%gxe,                                 &
456:      &              user%gys:user%gye)
457:       PetscErrorCode ierr

459: !  Local variables:
460:       PetscInt  i,j
461:       PetscScalar   temp1,temp,hx,hy
462:       PetscScalar   one

464: !  Set parameters

466:       ierr   = 0
467:       one    = 1.0
468:       hx     = one/(dble(user%mx-1))
469:       hy     = one/(dble(user%my-1))
470:       temp1  = user%lambda/(user%lambda + one)

472:       do 20 j=user%ys,user%ye
473:          temp = dble(min(j-1,user%my-j))*hy
474:          do 10 i=user%xs,user%xe
475:             if (i .eq. 1 .or. j .eq. 1                                  &
476:      &             .or. i .eq. user%mx .or. j .eq. user%my) then
477:               x(i,j) = 0.0
478:             else
479:               x(i,j) = temp1 *                                          &
480:      &          sqrt(min(dble(min(i-1,user%mx-i)*hx),dble(temp)))
481:             endif
482:  10      continue
483:  20   continue

485:       return
486:       end

488: ! ---------------------------------------------------------------------
489: !
490: !  FormFunctionLocal - Computes nonlinear function, called by
491: !  the higher level routine FormFunction().
492: !
493: !  Input Parameter:
494: !  x - local vector data
495: !
496: !  Output Parameters:
497: !  f - local vector data, f(x)
498: !  ierr - error code
499: !
500: !  Notes:
501: !  This routine uses standard Fortran-style computations over a 2-dim array.
502: !
503:       subroutine FormFunctionLocal(x,f,user,ierr)
504:       use f90module

506:       implicit none

508: !  Input/output variables:
509:       type (userctx) user
510:       PetscScalar  x(user%gxs:user%gxe,                                         &
511:      &              user%gys:user%gye)
512:       PetscScalar  f(user%xs:user%xe,                                           &
513:      &              user%ys:user%ye)
514:       PetscErrorCode ierr

516: !  Local variables:
517:       PetscScalar   two,one,hx,hy,hxdhy,hydhx,sc
518:       PetscScalar   u,uxx,uyy
519:       PetscInt  i,j

521:       one    = 1.0
522:       two    = 2.0
523:       hx     = one/dble(user%mx-1)
524:       hy     = one/dble(user%my-1)
525:       sc     = hx*hy*user%lambda
526:       hxdhy  = hx/hy
527:       hydhx  = hy/hx

529: !  Compute function over the locally owned part of the grid

531:       do 20 j=user%ys,user%ye
532:          do 10 i=user%xs,user%xe
533:             if (i .eq. 1 .or. j .eq. 1                                  &
534:      &             .or. i .eq. user%mx .or. j .eq. user%my) then
535:                f(i,j) = x(i,j)
536:             else
537:                u = x(i,j)
538:                uxx = hydhx * (two*u                                     &
539:      &                - x(i-1,j) - x(i+1,j))
540:                uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
541:                f(i,j) = uxx + uyy - sc*exp(u)
542:             endif
543:  10      continue
544:  20   continue

546:       return
547:       end

549: ! ---------------------------------------------------------------------
550: !
551: !  FormJacobian - Evaluates Jacobian matrix.
552: !
553: !  Input Parameters:
554: !  snes     - the SNES context
555: !  x        - input vector
556: !  dummy    - optional user-defined context, as set by SNESSetJacobian()
557: !             (not used here)
558: !
559: !  Output Parameters:
560: !  jac      - Jacobian matrix
561: !  jac_prec - optionally different preconditioning matrix (not used here)
562: !  flag     - flag indicating matrix structure
563: !
564: !  Notes:
565: !  This routine serves as a wrapper for the lower-level routine
566: !  "FormJacobianLocal", where the actual computations are
567: !  done using the standard Fortran style of treating the local
568: !  vector data as a multidimensional array over the local mesh.
569: !  This routine merely accesses the local vector data via
570: !  VecGetArrayF90() and VecRestoreArrayF90().
571: !
572: !  Notes:
573: !  Due to grid point reordering with DAs, we must always work
574: !  with the local grid points, and then transform them to the new
575: !  global numbering with the "ltog" mapping (via DAGetGlobalIndicesF90()).
576: !  We cannot work directly with the global numbers for the original
577: !  uniprocessor grid!
578: !
579: !  Two methods are available for imposing this transformation
580: !  when setting matrix entries:
581: !    (A) MatSetValuesLocal(), using the local ordering (including
582: !        ghost points!)
583: !        - Use DAGetGlobalIndicesF90() to extract the local-to-global map
584: !        - Associate this map with the matrix by calling
585: !          MatSetLocalToGlobalMapping() once
586: !        - Set matrix entries using the local ordering
587: !          by calling MatSetValuesLocal()
588: !    (B) MatSetValues(), using the global ordering
589: !        - Use DAGetGlobalIndicesF90() to extract the local-to-global map
590: !        - Then apply this map explicitly yourself
591: !        - Set matrix entries using the global ordering by calling
592: !          MatSetValues()
593: !  Option (A) seems cleaner/easier in many cases, and is the procedure
594: !  used in this example.
595: !
596:       subroutine FormJacobian(snes,X,jac,jac_prec,flag,user,ierr)
597:       use f90module
598:       implicit none

600: #include "include/finclude/petsc.h"
601: #include "include/finclude/petscvec.h"
602: #include "include/finclude/petscda.h"
603: #include "include/finclude/petscis.h"
604: #include "include/finclude/petscmat.h"
605: #include "include/finclude/petscksp.h"
606: #include "include/finclude/petscpc.h"
607: #include "include/finclude/petscsnes.h"

609: #include "include/finclude/petscvec.h90"

611: !  Input/output variables:
612:       SNES         snes
613:       Vec          X
614:       Mat          jac,jac_prec
615:       MatStructure flag
616:       type(userctx)  user
617:       PetscErrorCode ierr

619: !  Declarations for use with local arrays:
620:       PetscScalar,pointer :: lx_v(:)
621:       Vec            localX

623: !  Scatter ghost points to local vector, using the 2-step process
624: !     DAGlobalToLocalBegin(), DAGlobalToLocalEnd()
625: !  Computations can be done while messages are in transition,
626: !  by placing code between these two statements.

628:       call DAGetLocalVector(user%da,localX,ierr)
629:       call DAGlobalToLocalBegin(user%da,X,INSERT_VALUES,localX,            &
630:      &     ierr)
631:       call DAGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr)

633: !  Get a pointer to vector data
634:       call VecGetArrayF90(localX,lx_v,ierr)

636: !  Compute entries for the locally owned part of the Jacobian preconditioner.
637:       call FormJacobianLocal(lx_v,jac_prec,user,ierr)

639: !  Assemble matrix, using the 2-step process:
640: !     MatAssemblyBegin(), MatAssemblyEnd()
641: !  Computations can be done while messages are in transition,
642: !  by placing code between these two statements.

644:       call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
645:       if (jac .ne. jac_prec) then
646:          call MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr)
647:       endif
648:       call VecRestoreArrayF90(localX,lx_v,ierr)
649:       call DARestoreLocalVector(user%da,localX,ierr)
650:       call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
651:       if (jac .ne. jac_prec) then
652:         call MatAssemblyEnd(jac_prec,MAT_FINAL_ASSEMBLY,ierr)
653:       endif
654: 
655: !  Set flag to indicate that the Jacobian matrix retains an identical
656: !  nonzero structure throughout all nonlinear iterations (although the
657: !  values of the entries change). Thus, we can save some work in setting
658: !  up the preconditioner (e.g., no need to redo symbolic factorization for
659: !  ILU/ICC preconditioners).
660: !   - If the nonzero structure of the matrix is different during
661: !     successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
662: !     must be used instead.  If you are unsure whether the matrix
663: !     structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
664: !   - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
665: !     believes your assertion and does not check the structure
666: !     of the matrix.  If you erroneously claim that the structure
667: !     is the same when it actually is not, the new preconditioner
668: !     will not function correctly.  Thus, use this optimization
669: !     feature with caution!

671:       flag = SAME_NONZERO_PATTERN

673: !  Tell the matrix that we will never add a new nonzero location; if
674: !  we do, it will generate an error.

676:       call MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,      &
677:      &                  ierr)

679:       return
680:       end

682: ! ---------------------------------------------------------------------
683: !
684: !  FormJacobianLocal - Computes Jacobian preconditioner matrix,
685: !  called by the higher level routine FormJacobian().
686: !
687: !  Input Parameters:
688: !  x        - local vector data
689: !
690: !  Output Parameters:
691: !  jac_prec - Jacobian preconditioner matrix
692: !  ierr     - error code
693: !
694: !  Notes:
695: !  This routine uses standard Fortran-style computations over a 2-dim array.
696: !
697: !  Notes:
698: !  Due to grid point reordering with DAs, we must always work
699: !  with the local grid points, and then transform them to the new
700: !  global numbering with the "ltog" mapping (via DAGetGlobalIndicesF90()).
701: !  We cannot work directly with the global numbers for the original
702: !  uniprocessor grid!
703: !
704: !  Two methods are available for imposing this transformation
705: !  when setting matrix entries:
706: !    (A) MatSetValuesLocal(), using the local ordering (including
707: !        ghost points!)
708: !        - Use DAGetGlobalIndicesF90() to extract the local-to-global map
709: !        - Associate this map with the matrix by calling
710: !          MatSetLocalToGlobalMapping() once
711: !        - Set matrix entries using the local ordering
712: !          by calling MatSetValuesLocal()
713: !    (B) MatSetValues(), using the global ordering
714: !        - Use DAGetGlobalIndicesF90() to extract the local-to-global map
715: !        - Then apply this map explicitly yourself
716: !        - Set matrix entries using the global ordering by calling
717: !          MatSetValues()
718: !  Option (A) seems cleaner/easier in many cases, and is the procedure
719: !  used in this example.
720: !
721:       subroutine FormJacobianLocal(x,jac_prec,user,ierr)
722:       use f90module
723:       implicit none

725: #include "include/finclude/petsc.h"
726: #include "include/finclude/petscvec.h"
727: #include "include/finclude/petscda.h"
728: #include "include/finclude/petscis.h"
729: #include "include/finclude/petscmat.h"
730: #include "include/finclude/petscksp.h"
731: #include "include/finclude/petscpc.h"
732: #include "include/finclude/petscsnes.h"

734: !  Input/output variables:
735:       type (userctx) user
736:       PetscScalar    x(user%gxs:user%gxe,                                      &
737:      &               user%gys:user%gye)
738:       Mat            jac_prec
739:       PetscErrorCode ierr

741: !  Local variables:
742:       PetscInt      row,col(5),i,j
743:       PetscInt      ione,ifive
744:       PetscScalar   two,one,hx,hy,hxdhy
745:       PetscScalar   hydhx,sc,v(5)

747: !  Set parameters
748:       ione   = 1
749:       ifive  = 5
750:       one    = 1.0
751:       two    = 2.0
752:       hx     = one/dble(user%mx-1)
753:       hy     = one/dble(user%my-1)
754:       sc     = hx*hy
755:       hxdhy  = hx/hy
756:       hydhx  = hy/hx

758: !  Compute entries for the locally owned part of the Jacobian.
759: !   - Currently, all PETSc parallel matrix formats are partitioned by
760: !     contiguous chunks of rows across the processors.
761: !   - Each processor needs to insert only elements that it owns
762: !     locally (but any non-local elements will be sent to the
763: !     appropriate processor during matrix assembly).
764: !   - Here, we set all entries for a particular row at once.
765: !   - We can set matrix entries using either MatSetValuesLocal() or
766: !     MatSetValues(), as discussed above (see the sketch below).
767: !   - Note that MatSetValues() uses 0-based row and column numbers
768: !     in Fortran as well as in C.
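!   - As a sketch of option (B) above (not used in this example; it
!     assumes the DAGetGlobalIndicesF90() and MatSetValues() calling
!     sequences of this PETSc release), one would fetch the
!     local-to-global map once and translate the 0-based local index
!     row by hand, e.g. for a boundary point:
!
!       PetscInt, pointer :: ltog(:)
!       PetscInt           nloc,grow,gcol(1)
!       call DAGetGlobalIndicesF90(user%da,nloc,ltog,ierr)
!       grow    = ltog(row+1)
!       gcol(1) = grow
!       call MatSetValues(jac_prec,ione,grow,ione,gcol,v,               &
!      &                  INSERT_VALUES,ierr)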

770:       do 20 j=user%ys,user%ye
771:          row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
772:          do 10 i=user%xs,user%xe
773:             row = row + 1
774: !           boundary points
775:             if (i .eq. 1 .or. j .eq. 1                                  &
776:      &             .or. i .eq. user%mx .or. j .eq. user%my) then
777:                col(1) = row
778:                v(1)   = one
779:                call MatSetValuesLocal(jac_prec,ione,row,ione,col,v,          &
780:      &                           INSERT_VALUES,ierr)
781: !           interior grid points
782:             else
783:                v(1) = -hxdhy
784:                v(2) = -hydhx
785:                v(3) = two*(hydhx + hxdhy)                               &
786:      &                  - sc*user%lambda*exp(x(i,j))
787:                v(4) = -hydhx
788:                v(5) = -hxdhy
789:                col(1) = row - user%gxm
790:                col(2) = row - 1
791:                col(3) = row
792:                col(4) = row + 1
793:                col(5) = row + user%gxm
794:                call MatSetValuesLocal(jac_prec,ione,row,ifive,col,v,         &
795:      &                                INSERT_VALUES,ierr)
796:             endif
797:  10      continue
798:  20   continue

800:       return
801:       end