!
!  Description: Solves a nonlinear system in parallel with SNES.
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DMDAs) to partition the parallel grid.
!  The command line options include:
!    -par <parameter>, where <parameter> indicates the nonlinearity of the problem
!       problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
!
!/*T
!  Concepts: SNES^parallel Bratu example
!  Concepts: DMDA^using distributed arrays;
!  Processors: n
!T*/
!
!  --------------------------------------------------------------------------
!
!  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!  the partial differential equation
!
!          -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
!
!  with boundary conditions
!
!           u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!  A finite difference approximation with the usual 5-point stencil
!  is used to discretize the boundary value problem to obtain a nonlinear
!  system of equations.
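!
!  For reference, the discrete equations assembled in FormFunctionLocal()
!  below are (with hx = 1/(mx-1), hy = 1/(my-1)):
!
!     f(i,j) = (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
!            + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
!            - hx*hy*lambda*exp(u(i,j)) = 0     at interior grid points,
!     f(i,j) = u(i,j) = 0                       at boundary grid points.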
!
!  The uniprocessor version of this code is snes/examples/tutorials/ex4f.F
!
!  --------------------------------------------------------------------------
!  The 'def' versions of the PETSc include files (used below) must be used
!  when including PETSc headers inside a module or interface, because
!  modules and interfaces cannot contain the declarations found in the
!  full include files.
!
      module f90module
      type userctx
#include <finclude/petscsysdef.h>
#include <finclude/petscvecdef.h>
#include <finclude/petscdmdef.h>
        PetscInt xs,xe,xm,gxs,gxe,gxm
        PetscInt ys,ye,ym,gys,gye,gym
        PetscInt mx,my
        PetscMPIInt rank
        PetscReal lambda
      end type userctx

      contains
! ---------------------------------------------------------------------
!
!  FormFunction - Evaluates nonlinear function, F(x).
!
!  Input Parameters:
!  snes  - the SNES context
!  X     - input vector
!  dummy - optional user-defined context, as set by SNESSetFunction()
!          (not used here)
!
!  Output Parameter:
!  F - function vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormFunctionLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
!
      subroutine FormFunction(snes,X,F,user,ierr)
      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscdmda.h>
#include <finclude/petscis.h>
#include <finclude/petscmat.h>
#include <finclude/petscksp.h>
#include <finclude/petscpc.h>
#include <finclude/petscsnes.h>
#include <finclude/petscvec.h90>
#include <finclude/petscsnes.h90>

!  Input/output variables:
      SNES           snes
      Vec            X,F
      PetscErrorCode ierr
      type (userctx) user
      DM             da

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:),lf_v(:)
      Vec            localX

!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
!  By placing code between these two statements, computations can
!  be done while messages are in transition.
      call SNESGetDM(snes,da,ierr)
      call DMGetLocalVector(da,localX,ierr)
      call DMGlobalToLocalBegin(da,X,INSERT_VALUES, &
     &                          localX,ierr)
      call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)
!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
!      the data array.  Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArrayF90() when you no longer need access to
!      the array.
!    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
!      and is usable from Fortran 90 only.
      call VecGetArrayF90(localX,lx_v,ierr)
      call VecGetArrayF90(F,lf_v,ierr)

!  Compute function over the locally owned part of the grid
      call FormFunctionLocal(lx_v,lf_v,user,ierr)

!  Restore vectors
      call VecRestoreArrayF90(localX,lx_v,ierr)
      call VecRestoreArrayF90(F,lf_v,ierr)
!  Return the local work vector to the DMDA
      call DMRestoreLocalVector(da,localX,ierr)
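!  Log an approximate flop count for the local function evaluation:
!  roughly 11 floating-point operations per locally owned grid point.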
      call PetscLogFlops(11.0d0*user%ym*user%xm,ierr)

!      call VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)
!      call VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)
      return
      end subroutine FormFunction
      end module f90module
      module f90moduleinterfaces
      use f90module

      Interface SNESSetApplicationContext
        Subroutine SNESSetApplicationContext(snes,ctx,ierr)
          use f90module
          SNES snes
          type(userctx) ctx
          PetscErrorCode ierr
        End Subroutine
      End Interface SNESSetApplicationContext

      Interface SNESGetApplicationContext
        Subroutine SNESGetApplicationContext(snes,ctx,ierr)
          use f90module
          SNES snes
          type(userctx), pointer :: ctx
          PetscErrorCode ierr
        End Subroutine
      End Interface SNESGetApplicationContext
      end module f90moduleinterfaces
      program main
      use f90module
      use f90moduleinterfaces
      implicit none
!
#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscdmda.h>
#include <finclude/petscis.h>
#include <finclude/petscmat.h>
#include <finclude/petscksp.h>
#include <finclude/petscpc.h>
#include <finclude/petscsnes.h>
#include <finclude/petscvec.h90>
#include <finclude/petscdmda.h90>
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     snes        - nonlinear solver
!     x, r        - solution, residual vectors
!     J           - Jacobian matrix
!     its         - iterations for convergence
!     Nx, Ny      - number of processors in x- and y- directions
!     matrix_free - flag indicating use of the matrix-free version
!
      SNES           snes
      Vec            x,r
      Mat            J
      PetscErrorCode ierr
      PetscInt       its
      PetscBool      flg,matrix_free
      PetscInt       ione,nfour
      PetscReal      lambda_max,lambda_min
      type (userctx) user
      DM             da

!  Note: Any user-defined Fortran routines (such as FormJacobian)
!  MUST be declared as external.
      external FormInitialGuess,FormJacobian

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Initialize program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr)

!  Initialize problem parameters
      lambda_max  = 6.81
      lambda_min  = 0.0
      user%lambda = 6.0
      ione  = 1
      nfour = -4
      call PetscOptionsGetReal(PETSC_NULL_CHARACTER,'-par', &
     &                         user%lambda,flg,ierr)
      if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) &
     &  then
        if (user%rank .eq. 0) write(6,*) 'Lambda is out of range'
        SETERRQ(PETSC_COMM_SELF,1,' ',ierr)
      endif
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create nonlinear solver context
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      call SNESCreate(PETSC_COMM_WORLD,snes,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create vector data structures; set function evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create distributed array (DMDA) to manage parallel grid and vectors

!  This really needs only the star-type stencil, but we use the box
!  stencil temporarily.
      call DMDACreate2d(PETSC_COMM_WORLD, &
     &      DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_NONE, &
     &      DMDA_STENCIL_BOX,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE, &
     &      ione,ione,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr)
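!  As noted above, the star-type stencil suffices for the 5-point
!  discretization; an illustrative variant (not used here) would simply
!  pass DMDA_STENCIL_STAR in place of DMDA_STENCIL_BOX:
!
!      call DMDACreate2d(PETSC_COMM_WORLD, &
!     &      DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_NONE, &
!     &      DMDA_STENCIL_STAR,nfour,nfour,PETSC_DECIDE,PETSC_DECIDE, &
!     &      ione,ione,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,da,ierr)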
      call DMDAGetInfo(da,PETSC_NULL_INTEGER,user%mx,user%my, &
     &                 PETSC_NULL_INTEGER, &
     &                 PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
     &                 PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
     &                 PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
     &                 PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
     &                 PETSC_NULL_INTEGER,ierr)

!
!   Visualize the distribution of the array across the processors
!
!     call DMView(da,PETSC_VIEWER_DRAW_WORLD,ierr)

!  Extract global and local vectors from DMDA; then duplicate for remaining
!  vectors that are the same types
      call DMCreateGlobalVector(da,x,ierr)
      call VecDuplicate(x,r,ierr)

!  Get local grid boundaries (for 2-dimensional DMDA)
      call DMDAGetCorners(da,user%xs,user%ys,PETSC_NULL_INTEGER, &
     &                    user%xm,user%ym,PETSC_NULL_INTEGER,ierr)
      call DMDAGetGhostCorners(da,user%gxs,user%gys, &
     &                         PETSC_NULL_INTEGER,user%gxm,user%gym, &
     &                         PETSC_NULL_INTEGER,ierr)
!  Here we shift the starting indices up by one so that we can easily
!  use the Fortran convention of 1-based indices (rather than 0-based).
      user%xs  = user%xs+1
      user%ys  = user%ys+1
      user%gxs = user%gxs+1
      user%gys = user%gys+1

      user%ye  = user%ys+user%ym-1
      user%xe  = user%xs+user%xm-1
      user%gye = user%gys+user%gym-1
      user%gxe = user%gxs+user%gxm-1
      call SNESSetApplicationContext(snes,user,ierr)

!  Set function evaluation routine and vector
      call SNESSetFunction(snes,r,FormFunction,user,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create matrix data structure; set Jacobian evaluation routine
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Set Jacobian matrix data structure and default Jacobian evaluation
!  routine.  User can override with:
!     -snes_fd : default finite differencing approximation of Jacobian
!     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
!                (unless user explicitly sets preconditioner)
!     -snes_mf_operator : form preconditioning matrix as set by the user,
!                         but use matrix-free approx for Jacobian-vector
!                         products within Newton-Krylov method
!
!  Note:  For the parallel case, vectors and matrices MUST be partitioned
!     accordingly.  When using distributed arrays (DMDAs) to create vectors,
!     the DMDAs determine the problem partitioning.  We must explicitly
!     specify the local matrix dimensions upon its creation for compatibility
!     with the vector distribution.  Thus, the generic MatCreate() routine
!     is NOT sufficient when working with distributed arrays.
!
!  Note: Here we only approximately preallocate storage space for the
!     Jacobian.  See the users manual for a discussion of better techniques
!     for preallocating matrix memory.
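!
!     (Illustration only, not used here: if the Jacobian were created by
!     hand rather than by DMCreateMatrix() below, explicit preallocation
!     for the 5-point stencil would look roughly like
!
!         call MatSeqAIJSetPreallocation(J,ifive,PETSC_NULL_INTEGER,ierr)
!         call MatMPIAIJSetPreallocation(J,ifive,PETSC_NULL_INTEGER, &
!        &                               itwo,PETSC_NULL_INTEGER,ierr)
!
!     where ifive = 5 and itwo = 2 are rough per-row nonzero estimates;
!     both are hypothetical variables, not declared in this program.)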
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-snes_mf', &
     &                         matrix_free,ierr)
      if (.not. matrix_free) then
        call DMSetMatType(da,MATAIJ,ierr)
        call DMCreateMatrix(da,J,ierr)
        call SNESSetJacobian(snes,J,J,FormJacobian,user,ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Customize nonlinear solver; set runtime options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
      call SNESSetDM(snes,da,ierr)
      call SNESSetFromOptions(snes,ierr)
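!  For example (the executable name "ex5f90" is illustrative), a typical
!  parallel run using the options mentioned above might be:
!
!      mpiexec -n 4 ./ex5f90 -par 6.0 -snes_monitor -ksp_type gmres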
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Evaluate initial guess; then solve nonlinear system.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Note: The user should initialize the vector, x, with the initial guess
!  for the nonlinear solver prior to calling SNESSolve().  In particular,
!  to employ an initial guess of zero, the user should explicitly set
!  this vector to zero by calling VecSet().
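!
!  For a zero initial guess one could, for instance, replace the call to
!  FormInitialGuess() below with something like
!
!      zero = 0.0
!      call VecSet(x,zero,ierr)
!
!  where zero would be a PetscScalar (not declared in this example).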
      call FormInitialGuess(snes,x,ierr)
      call SNESSolve(snes,PETSC_NULL_OBJECT,x,ierr)
      call SNESGetIterationNumber(snes,its,ierr)
      if (user%rank .eq. 0) then
        write(6,100) its
      endif
 100  format('Number of SNES iterations = ',i5)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      if (.not. matrix_free) call MatDestroy(J,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(r,ierr)
      call SNESDestroy(snes,ierr)
      call DMDestroy(da,ierr)

      call PetscFinalize(ierr)
      end
! ---------------------------------------------------------------------
!
!  FormInitialGuess - Forms initial approximation.
!
!  Input Parameters:
!  X - vector
!
!  Output Parameter:
!  X - vector
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "InitialGuessLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
!
      subroutine FormInitialGuess(snes,X,ierr)
      use f90module
      use f90moduleinterfaces
      implicit none

#include <finclude/petscvec.h90>
#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscdmda.h>
#include <finclude/petscis.h>
#include <finclude/petscmat.h>
#include <finclude/petscksp.h>
#include <finclude/petscpc.h>
#include <finclude/petscsnes.h>

!  Input/output variables:
      SNES           snes
      type(userctx), pointer :: puser
      Vec            X
      PetscErrorCode ierr
      DM             da

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)
      Vec            localX
      ierr = 0
      call SNESGetDM(snes,da,ierr)
      call SNESGetApplicationContext(snes,puser,ierr)
!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
!      the data array.  Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArrayF90() when you no longer need access to
!      the array.
!    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
!      and is usable from Fortran 90 only.
      call DMGetLocalVector(da,localX,ierr)
      call VecGetArrayF90(localX,lx_v,ierr)

!  Compute initial guess over the locally owned part of the grid
      call InitialGuessLocal(puser,lx_v,ierr)

!  Restore vector
      call VecRestoreArrayF90(localX,lx_v,ierr)

!  Insert values into global vector
      call DMLocalToGlobalBegin(da,localX,INSERT_VALUES,X,ierr)
      call DMLocalToGlobalEnd(da,localX,INSERT_VALUES,X,ierr)
      call DMRestoreLocalVector(da,localX,ierr)

      return
      end
! ---------------------------------------------------------------------
!
!  InitialGuessLocal - Computes initial approximation, called by
!  the higher level routine FormInitialGuess().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  x - local vector data
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine InitialGuessLocal(user,x,ierr)
      use f90module
      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscdmda.h>
#include <finclude/petscis.h>
#include <finclude/petscmat.h>
#include <finclude/petscksp.h>
#include <finclude/petscpc.h>
#include <finclude/petscsnes.h>

!  Input/output variables:
      type (userctx) user
      PetscScalar    x(user%gxs:user%gxe, &
     &                 user%gys:user%gye)
      PetscErrorCode ierr

!  Local variables:
      PetscInt    i,j
      PetscScalar temp1,temp,hx,hy
      PetscScalar one
!  Set parameters
      ierr  = 0
      one   = 1.0
      hx    = one/(dble(user%mx-1))
      hy    = one/(dble(user%my-1))
      temp1 = user%lambda/(user%lambda + one)
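!  The loop below sets x(i,j) = lambda/(lambda+1) * sqrt(d), where d is the
!  scaled distance from grid point (i,j) to the nearest boundary; boundary
!  points themselves are set to zero.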
      do 20 j=user%ys,user%ye
         temp = dble(min(j-1,user%my-j))*hy
         do 10 i=user%xs,user%xe
            if (i .eq. 1 .or. j .eq. 1 &
     &             .or. i .eq. user%mx .or. j .eq. user%my) then
              x(i,j) = 0.0
            else
              x(i,j) = temp1 * &
     &          sqrt(min(dble(min(i-1,user%mx-i)*hx),dble(temp)))
            endif
 10      continue
 20   continue

      return
      end
! ---------------------------------------------------------------------
!
!  FormFunctionLocal - Computes nonlinear function, called by
!  the higher level routine FormFunction().
!
!  Input Parameter:
!  x - local vector data
!
!  Output Parameters:
!  f - local vector data, f(x)
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine FormFunctionLocal(x,f,user,ierr)
      use f90module

      implicit none

!  Input/output variables:
      type (userctx) user
      PetscScalar    x(user%gxs:user%gxe, &
     &                 user%gys:user%gye)
      PetscScalar    f(user%xs:user%xe, &
     &                 user%ys:user%ye)
      PetscErrorCode ierr

!  Local variables:
      PetscScalar two,one,hx,hy,hxdhy,hydhx,sc
      PetscScalar u,uxx,uyy
      PetscInt    i,j

      one   = 1.0
      two   = 2.0
      hx    = one/dble(user%mx-1)
      hy    = one/dble(user%my-1)
      sc    = hx*hy*user%lambda
      hxdhy = hx/hy
      hydhx = hy/hx
!  Compute function over the locally owned part of the grid

      do 20 j=user%ys,user%ye
         do 10 i=user%xs,user%xe
            if (i .eq. 1 .or. j .eq. 1 &
     &             .or. i .eq. user%mx .or. j .eq. user%my) then
              f(i,j) = x(i,j)
            else
              u = x(i,j)
              uxx = hydhx * (two*u &
     &              - x(i-1,j) - x(i+1,j))
              uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
              f(i,j) = uxx + uyy - sc*exp(u)
            endif
 10      continue
 20   continue

      return
      end
! ---------------------------------------------------------------------
!
!  FormJacobian - Evaluates Jacobian matrix.
!
!  Input Parameters:
!  snes  - the SNES context
!  x     - input vector
!  dummy - optional user-defined context, as set by SNESSetJacobian()
!          (not used here)
!
!  Output Parameters:
!  jac      - Jacobian matrix
!  jac_prec - optionally different preconditioning matrix (not used here)
!  flag     - flag indicating matrix structure
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormJacobianLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArrayF90() and VecRestoreArrayF90().
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping (via DMDAGetGlobalIndicesF90()).
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Use DMDAGetGlobalIndicesF90() to extract the local-to-global map
!        - Associate this map with the matrix by calling
!          MatSetLocalToGlobalMapping() once
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Use DMDAGetGlobalIndicesF90() to extract the local-to-global map
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!    Option (A) seems cleaner/easier in many cases, and is the procedure
!    used in this example.
!
      subroutine FormJacobian(snes,X,jac,jac_prec,flag,user,ierr)
      use f90module
      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscdmda.h>
#include <finclude/petscis.h>
#include <finclude/petscmat.h>
#include <finclude/petscksp.h>
#include <finclude/petscpc.h>
#include <finclude/petscsnes.h>

#include <finclude/petscvec.h90>

!  Input/output variables:
      SNES           snes
      Vec            X
      Mat            jac,jac_prec
      MatStructure   flag
      type(userctx)  user
      PetscErrorCode ierr
      DM             da

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)
      Vec            localX
!  Scatter ghost points to local vector, using the 2-step process
!     DMGlobalToLocalBegin(), DMGlobalToLocalEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call SNESGetDM(snes,da,ierr)
      call DMGetLocalVector(da,localX,ierr)
      call DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX, &
     &                          ierr)
      call DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)

!  Get a pointer to vector data
      call VecGetArrayF90(localX,lx_v,ierr)

!  Compute entries for the locally owned part of the Jacobian preconditioner.
      call FormJacobianLocal(lx_v,jac_prec,user,ierr)

!  Assemble matrix, using the 2-step process:
!     MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
      if (jac .ne. jac_prec) then
        call MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr)
      endif
      call VecRestoreArrayF90(localX,lx_v,ierr)
      call DMRestoreLocalVector(da,localX,ierr)
      call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
      if (jac .ne. jac_prec) then
        call MatAssemblyEnd(jac_prec,MAT_FINAL_ASSEMBLY,ierr)
      endif
!  Set flag to indicate that the Jacobian matrix retains an identical
!  nonzero structure throughout all nonlinear iterations (although the
!  values of the entries change).  Thus, we can save some work in setting
!  up the preconditioner (e.g., no need to redo symbolic factorization for
!  ILU/ICC preconditioners).
!   - If the nonzero structure of the matrix is different during
!     successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
!     must be used instead.  If you are unsure whether the matrix
!     structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
!   - Caution: If you specify SAME_NONZERO_PATTERN, PETSc
!     believes your assertion and does not check the structure
!     of the matrix.  If you erroneously claim that the structure
!     is the same when it actually is not, the new preconditioner
!     will not function correctly.  Thus, use this optimization
!     feature with caution!

      flag = SAME_NONZERO_PATTERN
!  Tell the matrix that we will never add a new nonzero location to it;
!  if we do, PETSc will generate an error.
      call MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE, &
     &                  ierr)

      return
      end
! ---------------------------------------------------------------------
!
!  FormJacobianLocal - Computes Jacobian preconditioner matrix,
!  called by the higher level routine FormJacobian().
!
!  Input Parameters:
!  x - local vector data
!
!  Output Parameters:
!  jac_prec - Jacobian preconditioner matrix
!  ierr     - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
!  Notes:
!  Due to grid point reordering with DMDAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping (via DMDAGetGlobalIndicesF90()).
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Use DMDAGetGlobalIndicesF90() to extract the local-to-global map
!        - Associate this map with the matrix by calling
!          MatSetLocalToGlobalMapping() once
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Use DMDAGetGlobalIndicesF90() to extract the local-to-global map
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!    Option (A) seems cleaner/easier in many cases, and is the procedure
!    used in this example.
!
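!  As a rough sketch only (not used in this example): with option (B),
!  assuming the local-to-global map had been obtained from
!  DMDAGetGlobalIndicesF90() into a pointer array ltog(:) (1-based in
!  Fortran), the interior-point insertion below would instead look like
!
!      grow = ltog(row+1)              ! global index of this local row
!      do k=1,5
!         gcol(k) = ltog(col(k)+1)     ! global indices of the columns
!      enddo
!      call MatSetValues(jac_prec,ione,grow,ifive,gcol,v, &
!     &                  INSERT_VALUES,ierr)
!
!  where grow, gcol, and k are hypothetical local variables not declared
!  in this routine.
!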
      subroutine FormJacobianLocal(x,jac_prec,user,ierr)
      use f90module
      implicit none

#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscdmda.h>
#include <finclude/petscis.h>
#include <finclude/petscmat.h>
#include <finclude/petscksp.h>
#include <finclude/petscpc.h>
#include <finclude/petscsnes.h>

!  Input/output variables:
      type (userctx) user
      PetscScalar    x(user%gxs:user%gxe, &
     &                 user%gys:user%gye)
      Mat            jac_prec
      PetscErrorCode ierr

!  Local variables:
      PetscInt    row,col(5),i,j
      PetscInt    ione,ifive
      PetscScalar two,one,hx,hy,hxdhy
      PetscScalar hydhx,sc,v(5)

!  Set parameters
      ione  = 1
      ifive = 5
      one   = 1.0
      two   = 2.0
      hx    = one/dble(user%mx-1)
      hy    = one/dble(user%my-1)
      sc    = hx*hy
      hxdhy = hx/hy
      hydhx = hy/hx
!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries using either MatSetValuesLocal()
!     or MatSetValues(), as discussed above.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
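!   - The row and col values below are 0-based indices into the LOCAL
!     (ghosted) grid: row = (j-gys)*gxm + (i-gxs) for grid point (i,j),
!     which is what MatSetValuesLocal() expects once the DMDA's
!     local-to-global mapping has been attached to the matrix (done here
!     by DMCreateMatrix() in the main program).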
      do 20 j=user%ys,user%ye
         row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
         do 10 i=user%xs,user%xe
            row = row + 1
!           boundary points
            if (i .eq. 1 .or. j .eq. 1 &
     &             .or. i .eq. user%mx .or. j .eq. user%my) then
               col(1) = row
               v(1)   = one
               call MatSetValuesLocal(jac_prec,ione,row,ione,col,v, &
     &                                INSERT_VALUES,ierr)
!           interior grid points
            else
               v(1) = -hxdhy
               v(2) = -hydhx
               v(3) = two*(hydhx + hxdhy) &
     &                - sc*user%lambda*exp(x(i,j))
               v(4) = -hydhx
               v(5) = -hxdhy
               col(1) = row - user%gxm
               col(2) = row - 1
               col(3) = row
               col(4) = row + 1
               col(5) = row + user%gxm
               call MatSetValuesLocal(jac_prec,ione,row,ifive,col,v, &
     &                                INSERT_VALUES,ierr)
            endif
 10      continue
 20   continue

      return
      end