Actual source code: ex5f90.F
1: ! "$Id: ex5f90.F,v 1.39 2001/03/19 21:30:56 bsmith Exp $";
2: !
3: ! Description: Solves a nonlinear system in parallel with SNES.
4: ! We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
5: ! domain, using distributed arrays (DAs) to partition the parallel grid.
6: ! The command line options include:
7: ! -par <parameter>, where <parameter> indicates the nonlinearity of the problem
8: ! problem SFI: <parameter> = Bratu parameter (0 <= par <= 6.81)
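!
!  For example, a typical run (the exact launcher and executable name
!  depend on the local installation) might look like:
!
!     mpirun -np 4 ex5f90 -par 5.0 -mx 16 -my 16 -snes_monitor
!
!  where -mx/-my set the global grid dimensions and -snes_monitor prints
!  the residual norm at each Newton iteration.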
9: !
10: !/*T
11: ! Concepts: SNES^parallel Bratu example
12: ! Concepts: DA^using distributed arrays;
13: ! Processors: n
14: !T*/
15: !
16: ! --------------------------------------------------------------------------
17: !
18: ! Solid Fuel Ignition (SFI) problem. This problem is modeled by
19: ! the partial differential equation
20: !
21: ! -Laplacian u - lambda*exp(u) = 0, 0 < x,y < 1,
22: !
23: ! with boundary conditions
24: !
25: ! u = 0 for x = 0, x = 1, y = 0, y = 1.
26: !
27: ! A finite difference approximation with the usual 5-point stencil
28: ! is used to discretize the boundary value problem to obtain a nonlinear
29: ! system of equations.
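!
!  Concretely (matching ApplicationFunction below), with hx = 1/(mx-1) and
!  hy = 1/(my-1), the discrete residual at an interior grid point (i,j) is
!
!     f(i,j) = (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
!            + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
!            - hx*hy*lambda*exp(u(i,j))
!
!  while at boundary points simply f(i,j) = u(i,j).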
30: !
31: ! The uniprocessor version of this code is snes/examples/tutorials/ex4f.F
32: !
33: ! --------------------------------------------------------------------------
34: ! The following define must be used before including any PETSc include files
35: ! in a module or interface, because those include files cannot contain
36: ! variable declarations in that context.
37: !
39: module f90module
40: type userctx
41: #define PETSC_AVOID_DECLARATIONS
42: #include "include/finclude/petsc.h"
43: #include "include/finclude/petscvec.h"
44: #include "include/finclude/petscda.h"
45: #undef PETSC_AVOID_DECLARATIONS
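!     Application context fields, as used throughout this example:
!       da                   - distributed array managing the parallel grid
!       xs,xe,xm             - start, end, and width of the local grid (x direction)
!       gxs,gxe,gxm          - the same, including ghost points
!       ys,ye,ym,gys,gye,gym - corresponding quantities in the y direction
!       mx,my                - global grid dimensions
!       rank,size            - MPI rank and number of processes
!       lambda               - Bratu nonlinearity parameter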
46: DA da
47: integer xs,xe,xm,gxs,gxe,gxm
48: integer ys,ye,ym,gys,gye,gym
49: integer mx,my,rank,size
50: double precision lambda
51: end type userctx
52: contains
53: ! ---------------------------------------------------------------------
54: !
55: ! FormFunction - Evaluates nonlinear function, F(x).
56: !
57: ! Input Parameters:
58: ! snes - the SNES context
59: ! X - input vector
60: ! dummy - optional user-defined context, as set by SNESSetFunction()
61: ! (not used here)
62: !
63: ! Output Parameter:
64: ! F - function vector
65: !
66: ! Notes:
67: ! This routine serves as a wrapper for the lower-level routine
68: ! "ApplicationFunction", where the actual computations are
69: ! done using the standard Fortran style of treating the local
70: ! vector data as a multidimensional array over the local mesh.
71: ! This routine merely handles ghost point scatters and accesses
72: ! the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
73: !
74: subroutine FormFunction(snes,X,F,user,ierr)
75: implicit none
77: #include "include/finclude/petsc.h"
78: #include "include/finclude/petscvec.h"
79: #include "include/finclude/petscda.h"
80: #include "include/finclude/petscis.h"
81: #include "include/finclude/petscmat.h"
82: #include "include/finclude/petscksp.h"
83: #include "include/finclude/petscpc.h"
84: #include "include/finclude/petscsles.h"
85: #include "include/finclude/petscsnes.h"
87: #include "include/finclude/petscvec.h90"
90: ! Input/output variables:
91: SNES snes
92: Vec X,F,localX,localF
93: integer ierr
94: type (userctx) user
96: ! Declarations for use with local arrays:
97: Scalar,pointer :: lx_v(:),lf_v(:)
99: ! Scatter ghost points to local vector, using the 2-step process
100: ! DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
101: ! By placing code between these two statements, computations can
102: ! be done while messages are in transition.
104: call DAGetLocalVector(user%da,localX,ierr)
105: call DAGetLocalVector(user%da,localF,ierr)
106: call DAGlobalToLocalBegin(user%da,X,INSERT_VALUES, &
107: & localX,ierr)
108: call DAGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr)
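!  (No work is overlapped with the scatter here; as an illustration, any
!   purely local computation, such as evaluating the boundary rows of the
!   residual, could be placed between the Begin and End calls above.)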
110: ! Get a pointer to vector data.
111: ! - For default PETSc vectors, VecGetArrayF90() returns a pointer to
112: ! the data array. Otherwise, the routine is implementation dependent.
113: ! - You MUST call VecRestoreArrayF90() when you no longer need access to
114: ! the array.
115: ! - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
116: ! and is usable only from Fortran 90.
118: call VecGetArrayF90(localX,lx_v,ierr)
119: call VecGetArrayF90(localF,lf_v,ierr)
121: ! Compute function over the locally owned part of the grid
123: call ApplicationFunction(lx_v,lf_v,user,ierr)
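!  (lx_v and lf_v are 1-dimensional Fortran 90 pointers to the local data;
!   ApplicationFunction declares the corresponding dummy arguments as
!   2-dimensional arrays over the ghosted local grid, so the computation
!   can use natural (i,j) indexing.)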
125: ! Restore vectors
127: call VecRestoreArrayF90(localX,lx_v,ierr)
128: call VecRestoreArrayF90(localF,lf_v,ierr)
130: ! Insert values into global vector
132: call DALocalToGlobal(user%da,localF,INSERT_VALUES,F,ierr)
133: call DARestoreLocalVector(user%da,localX,ierr)
134: call DARestoreLocalVector(user%da,localF,ierr)
135: call PetscLogFlops(11*user%ym*user%xm,ierr)
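!  (An estimate of roughly 11 floating point operations per locally owned
!   grid point, corresponding to the arithmetic in ApplicationFunction.)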
137: ! call VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)
138: ! call VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)
140: return
141: end subroutine FormFunction
142: end module f90module
146: program main
147: use f90module
148: implicit none
149: !
150: !
151: #include "include/finclude/petsc.h"
152: #include "include/finclude/petscvec.h"
153: #include "include/finclude/petscda.h"
154: #include "include/finclude/petscis.h"
155: #include "include/finclude/petscmat.h"
156: #include "include/finclude/petscksp.h"
157: #include "include/finclude/petscpc.h"
158: #include "include/finclude/petscsles.h"
159: #include "include/finclude/petscsnes.h"
160: #include "include/finclude/petscvec.h90"
161: #include "include/finclude/petscda.h90"
163: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
164: ! Variable declarations
165: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
166: !
167: ! Variables:
168: ! snes - nonlinear solver
169: ! x, r - solution, residual vectors
170: ! J - Jacobian matrix
171: ! its - iterations for convergence
172: ! Nx, Ny - number of processors in x- and y-directions
173: ! matrix_free - flag (1 indicates the matrix-free version)
174: !
175: !
176: SNES snes
177: Vec x,r
178: Mat J
179: ISLocalToGlobalMapping isltog
180: integer its,Nx,Ny,matrix_free,flg,N,ierr,m
181: double precision lambda_max,lambda_min
182: integer,pointer :: ltog_a(:)
183: integer nloc
184: type (userctx) user
186: ! Note: Any user-defined Fortran routines (such as FormJacobian)
187: ! MUST be declared as external.
189: external FormInitialGuess,FormJacobian
191: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
192: ! Initialize program
193: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
195: call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
196: call MPI_Comm_size(PETSC_COMM_WORLD,user%size,ierr)
197: call MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr)
199: ! Initialize problem parameters
201: lambda_max = 6.81
202: lambda_min = 0.0
203: user%lambda = 6.0
204: user%mx = 4
205: user%my = 4
206: call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-mx',user%mx, &
207: & flg,ierr)
208: call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-my',user%my, &
209: & flg,ierr)
210: call PetscOptionsGetDouble(PETSC_NULL_CHARACTER,'-par', &
211: & user%lambda,flg,ierr)
212: if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) &
213: & then
214: if (user%rank .eq. 0) write(6,*) 'Lambda is out of range'
215: SETERRQ(1,' ',ierr)
216: endif
217: N = user%mx*user%my
220: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
221: ! Create nonlinear solver context
222: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
224: call SNESCreate(PETSC_COMM_WORLD,SNES_NONLINEAR_EQUATIONS, &
225: & snes,ierr)
227: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
228: ! Create vector data structures; set function evaluation routine
229: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
231: ! Create distributed array (DA) to manage parallel grid and vectors
233: Nx = PETSC_DECIDE
234: Ny = PETSC_DECIDE
235: call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-Nx',Nx,flg,ierr)
236: call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-Ny',Ny,flg,ierr)
237: if (Nx*Ny .ne. user%size .and. &
238: & (Nx .ne. PETSC_DECIDE .or. Ny .ne. PETSC_DECIDE)) then
239: if (user%rank .eq. 0) then
240: write(6,*) 'Incompatible number of procs: Nx * Ny != size'
241: endif
242: SETERRQ(1,' ',ierr)
243: endif
244: ! This really needs only the star-type stencil, but we use the box
245: ! stencil temporarily.
246: call DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_BOX, &
247: & user%mx,user%my,Nx,Ny,1,1,PETSC_NULL_INTEGER, &
248: & PETSC_NULL_INTEGER,user%da,ierr)
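!  (The two unit arguments above are the number of degrees of freedom per
!   node and the stencil width; the PETSC_NULL_INTEGER arguments let PETSc
!   choose how many grid points each process owns in each direction.)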
250: !
251: ! Visualize the distribution of the array across the processors
252: !
253: ! call DAView(user%da,PETSC_VIEWER_DRAW_WORLD,ierr)
255: ! Extract a global vector from the DA; then duplicate it for the remaining
256: ! vectors of the same type
258: call DACreateGlobalVector(user%da,x,ierr)
259: call VecDuplicate(x,r,ierr)
261: ! Get local grid boundaries (for 2-dimensional DA)
263: call DAGetCorners(user%da,user%xs,user%ys,PETSC_NULL_INTEGER, &
264: & user%xm,user%ym,PETSC_NULL_INTEGER,ierr)
265: call DAGetGhostCorners(user%da,user%gxs,user%gys, &
266: & PETSC_NULL_INTEGER,user%gxm,user%gym, &
267: & PETSC_NULL_INTEGER,ierr)
269: ! Here we shift the starting indices up by one so that we can easily
270: ! use the Fortran convention of 1-based indices (rather than 0-based indices).
272: user%xs = user%xs+1
273: user%ys = user%ys+1
274: user%gxs = user%gxs+1
275: user%gys = user%gys+1
277: user%ye = user%ys+user%ym-1
278: user%xe = user%xs+user%xm-1
279: user%gye = user%gys+user%gym-1
280: user%gxe = user%gxs+user%gxm-1
282: ! Set function evaluation routine and vector
284: call SNESSetFunction(snes,r,FormFunction,user,ierr)
286: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
287: ! Create matrix data structure; set Jacobian evaluation routine
288: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
290: ! Set Jacobian matrix data structure and default Jacobian evaluation
291: ! routine. User can override with:
292: ! -snes_fd : default finite differencing approximation of Jacobian
293: ! -snes_mf : matrix-free Newton-Krylov method with no preconditioning
294: ! (unless user explicitly sets preconditioner)
295: ! -snes_mf_operator : form preconditioning matrix as set by the user,
296: ! but use matrix-free approx for Jacobian-vector
297: ! products within Newton-Krylov method
298: !
299: ! Note: For the parallel case, vectors and matrices MUST be partitioned
300: ! accordingly. When using distributed arrays (DAs) to create vectors,
301: ! the DAs determine the problem partitioning. We must explicitly
302: ! specify the local matrix dimensions upon its creation for compatibility
303: ! with the vector distribution. Thus, the generic MatCreate() routine
304: ! is NOT sufficient when working with distributed arrays.
305: !
306: ! Note: Here we only approximately preallocate storage space for the
307: ! Jacobian. See the users manual for a discussion of better techniques
308: ! for preallocating matrix memory.
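!
!  As a sketch of one such technique (not used in this example), the number
!  of nonzeros in each locally owned row could be counted from the stencil
!  before the matrix is created, e.g. (nnz, k, i, j are illustrative names):
!
!      integer i,j,k
!      integer, allocatable :: nnz(:)
!      allocate(nnz(user%xm*user%ym))
!      k = 0
!      do j=user%ys,user%ye
!        do i=user%xs,user%xe
!          k = k + 1
!          if (i .eq. 1 .or. j .eq. 1 &
!     &        .or. i .eq. user%mx .or. j .eq. user%my) then
!            nnz(k) = 1
!          else
!            nnz(k) = 5
!          endif
!        enddo
!      enddo
!
!  The resulting counts (split into diagonal- and off-diagonal-block parts
!  in the parallel case) would then be passed to the matrix creation
!  routine in place of the constant 5.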
310: call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-snes_mf', &
311: & matrix_free,ierr)
312: if (matrix_free .eq. 0) then
313: if (user%size .eq. 1) then
314: call MatCreateSeqAIJ(PETSC_COMM_WORLD,N,N,5, &
315: & PETSC_NULL_INTEGER,J,ierr)
316: else
317: call VecGetLocalSize(x,m,ierr)
318: call MatCreateMPIAIJ(PETSC_COMM_WORLD,m,m,N,N,5, &
319: & PETSC_NULL_INTEGER,3,PETSC_NULL_INTEGER,J,ierr)
320: endif
321: call SNESSetJacobian(snes,J,J,FormJacobian,user,ierr)
323: ! Get the global node numbers for all local nodes, including ghost points.
324: ! Associate this mapping with the matrix for later use in setting matrix
325: ! entries via MatSetValuesLocal().
326: ! - Note that the interface to DAGetGlobalIndicesF90() differs from
327: ! DAGetGlobalIndices().
329: call DAGetGlobalIndicesF90(user%da,nloc,ltog_a,ierr)
330: call ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,nloc, &
331: & ltog_a,isltog,ierr)
332: call MatSetLocalToGlobalMapping(J,isltog,ierr)
333: call ISLocalToGlobalMappingDestroy(isltog,ierr)
335: endif
337: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
338: ! Customize nonlinear solver; set runtime options
339: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
341: ! Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
343: call SNESSetFromOptions(snes,ierr)
345: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
346: ! Evaluate initial guess; then solve nonlinear system.
347: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
349: ! Note: The user should initialize the vector, x, with the initial guess
350: ! for the nonlinear solver prior to calling SNESSolve(). In particular,
351: ! to employ an initial guess of zero, the user should explicitly set
352: ! this vector to zero by calling VecSet().
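!  (For example, a zero initial guess could be set with a call of the form
!   "call VecSet(zero,x,ierr)", where zero is a Scalar equal to 0.0; the
!   exact VecSet() calling sequence may differ between PETSc versions.)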
354: call FormInitialGuess(user,x,ierr)
355: call SNESSolve(snes,x,its,ierr)
356: if (user%rank .eq. 0) then
357: write(6,100) its
358: endif
359: 100 format('Number of Newton iterations = ',i5)
361: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
362: ! Free work space. All PETSc objects should be destroyed when they
363: ! are no longer needed.
364: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
366: if (matrix_free .eq. 0) call MatDestroy(J,ierr)
367: call VecDestroy(x,ierr)
368: call VecDestroy(r,ierr)
369: call SNESDestroy(snes,ierr)
370: call DADestroy(user%da,ierr)
371: call PetscFinalize(ierr)
372: end
374: ! ---------------------------------------------------------------------
375: !
376: ! FormInitialGuess - Forms initial approximation.
377: !
378: ! Input Parameter:
379: ! user - user-defined application context
380: !
381: ! Output Parameter:
382: ! X - vector containing the initial guess
383: !
384: ! Notes:
385: ! This routine serves as a wrapper for the lower-level routine
386: ! "ApplicationInitialGuess", where the actual computations are
387: ! done using the standard Fortran style of treating the local
388: ! vector data as a multidimensional array over the local mesh.
389: ! This routine merely handles ghost point scatters and accesses
390: ! the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
391: !
392: subroutine FormInitialGuess(user,X,ierr)
393: use f90module
394: implicit none
396: #include "include/finclude/petscvec.h90"
397: #include "include/finclude/petsc.h"
398: #include "include/finclude/petscvec.h"
399: #include "include/finclude/petscda.h"
400: #include "include/finclude/petscis.h"
401: #include "include/finclude/petscmat.h"
402: #include "include/finclude/petscksp.h"
403: #include "include/finclude/petscpc.h"
404: #include "include/finclude/petscsles.h"
405: #include "include/finclude/petscsnes.h"
407: ! Input/output variables:
408: type (userctx) user
409: Vec X,localX
410: integer ierr
411:
412: ! Declarations for use with local arrays:
413: Scalar,pointer :: lx_v(:)
415: ierr = 0
417: ! Get a pointer to vector data.
418: ! - For default PETSc vectors, VecGetArrayF90() returns a pointer to
419: ! the data array. Otherwise, the routine is implementation dependent.
420: ! - You MUST call VecRestoreArrayF90() when you no longer need access to
421: ! the array.
422: ! - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
423: ! and is usable only from Fortran 90.
425: call DAGetLocalVector(user%da,localX,ierr)
426: call VecGetArrayF90(localX,lx_v,ierr)
428: ! Compute initial guess over the locally owned part of the grid
430: call ApplicationInitialGuess(user,lx_v,ierr)
432: ! Restore vector
434: call VecRestoreArrayF90(localX,lx_v,ierr)
436: ! Insert values into global vector
438: call DALocalToGlobal(user%da,localX,INSERT_VALUES,X,ierr)
439: call DARestoreLocalVector(user%da,localX,ierr)
441: return
442: end
444: ! ---------------------------------------------------------------------
445: !
446: ! ApplicationInitialGuess - Computes initial approximation, called by
447: ! the higher level routine FormInitialGuess().
448: !
449: ! Input Parameter:
450: ! user - user-defined application context
451: !
452: ! Output Parameters:
453: ! x - local vector data containing the initial guess
454: ! ierr - error code
455: !
456: ! Notes:
457: ! This routine uses standard Fortran-style computations over a 2-dim array.
458: !
459: subroutine ApplicationInitialGuess(user,x,ierr)
460: use f90module
461: implicit none
463: #include "include/finclude/petsc.h"
464: #include "include/finclude/petscvec.h"
465: #include "include/finclude/petscda.h"
466: #include "include/finclude/petscis.h"
467: #include "include/finclude/petscmat.h"
468: #include "include/finclude/petscksp.h"
469: #include "include/finclude/petscpc.h"
470: #include "include/finclude/petscsles.h"
471: #include "include/finclude/petscsnes.h"
473: ! Input/output variables:
474: type (userctx) user
475: Scalar x(user%gxs:user%gxe,user%gys:user%gye)
476: integer ierr
478: ! Local variables:
479: integer i,j
480: Scalar temp1,temp,hx,hy,sc,one,hxdhy,hydhx
482: ! Set parameters
484: ierr = 0
485: one = 1.0
486: hx = one/(dble(user%mx-1))
487: hy = one/(dble(user%my-1))
488: sc = hx*hy*user%lambda
489: hxdhy = hx/hy
490: hydhx = hy/hx
491: temp1 = user%lambda/(user%lambda + one)
493: do 20 j=user%ys,user%ye
494: temp = dble(min(j-1,user%my-j))*hy
495: do 10 i=user%xs,user%xe
496: if (i .eq. 1 .or. j .eq. 1 &
497: & .or. i .eq. user%mx .or. j .eq. user%my) then
498: x(i,j) = 0.0
499: else
500: x(i,j) = temp1 * &
501: & sqrt(min(dble(min(i-1,user%mx-i)*hx),dble(temp)))
502: endif
503: 10 continue
504: 20 continue
506: return
507: end
509: ! ---------------------------------------------------------------------
510: !
511: ! ApplicationFunction - Computes nonlinear function, called by
512: ! the higher level routine FormFunction().
513: !
514: ! Input Parameter:
515: ! x - local vector data
516: !
517: ! Output Parameters:
518: ! f - local vector data, f(x)
519: ! ierr - error code
520: !
521: ! Notes:
522: ! This routine uses standard Fortran-style computations over a 2-dim array.
523: !
524: subroutine ApplicationFunction(x,f,user,ierr)
525: use f90module
527: implicit none
529: ! Input/output variables:
530: type (userctx) user
531: Scalar x(user%gxs:user%gxe,user%gys:user%gye)
532: Scalar f(user%gxs:user%gxe,user%gys:user%gye)
533: integer ierr
535: ! Local variables:
536: Scalar two,one,hx,hy,hxdhy,hydhx,sc
537: Scalar u,uxx,uyy
538: integer i,j
540: one = 1.0
541: two = 2.0
542: hx = one/dble(user%mx-1)
543: hy = one/dble(user%my-1)
544: sc = hx*hy*user%lambda
545: hxdhy = hx/hy
546: hydhx = hy/hx
548: ! Compute function over the locally owned part of the grid
550: do 20 j=user%ys,user%ye
551: do 10 i=user%xs,user%xe
552: if (i .eq. 1 .or. j .eq. 1 &
553: & .or. i .eq. user%mx .or. j .eq. user%my) then
554: f(i,j) = x(i,j)
555: else
556: u = x(i,j)
557: uxx = hydhx * (two*u &
558: & - x(i-1,j) - x(i+1,j))
559: uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
560: f(i,j) = uxx + uyy - sc*exp(u)
561: endif
562: 10 continue
563: 20 continue
565: return
566: end
568: ! ---------------------------------------------------------------------
569: !
570: ! FormJacobian - Evaluates Jacobian matrix.
571: !
572: ! Input Parameters:
573: ! snes - the SNES context
574: ! x - input vector
575: ! dummy - optional user-defined context, as set by SNESSetJacobian()
576: ! (not used here)
577: !
578: ! Output Parameters:
579: ! jac - Jacobian matrix
580: ! jac_prec - optionally different preconditioning matrix (not used here)
581: ! flag - flag indicating matrix structure
582: !
583: ! Notes:
584: ! This routine serves as a wrapper for the lower-level routine
585: ! "ApplicationJacobian", where the actual computations are
586: ! done using the standard Fortran style of treating the local
587: ! vector data as a multidimensional array over the local mesh.
588: ! This routine merely accesses the local vector data via
589: ! VecGetArrayF90() and VecRestoreArrayF90().
590: !
591: ! Notes:
592: ! Due to grid point reordering with DAs, we must always work
593: ! with the local grid points, and then transform them to the new
594: ! global numbering with the "ltog" mapping (via DAGetGlobalIndicesF90()).
595: ! We cannot work directly with the global numbers for the original
596: ! uniprocessor grid!
597: !
598: ! Two methods are available for imposing this transformation
599: ! when setting matrix entries:
600: ! (A) MatSetValuesLocal(), using the local ordering (including
601: ! ghost points!)
602: ! - Use DAGetGlobalIndicesF90() to extract the local-to-global map
603: ! - Associate this map with the matrix by calling
604: ! MatSetLocalToGlobalMapping() once
605: ! - Set matrix entries using the local ordering
606: ! by calling MatSetValuesLocal()
607: ! (B) MatSetValues(), using the global ordering
608: ! - Use DAGetGlobalIndicesF90() to extract the local-to-global map
609: ! - Then apply this map explicitly yourself
610: ! - Set matrix entries using the global ordering by calling
611: ! MatSetValues()
612: ! Option (A) seems cleaner/easier in many cases, and is the procedure
613: ! used in this example.
614: !
615: subroutine FormJacobian(snes,X,jac,jac_prec,flag,user,ierr)
616: use f90module
617: implicit none
619: #include "include/finclude/petsc.h"
620: #include "include/finclude/petscvec.h"
621: #include "include/finclude/petscda.h"
622: #include "include/finclude/petscis.h"
623: #include "include/finclude/petscmat.h"
624: #include "include/finclude/petscksp.h"
625: #include "include/finclude/petscpc.h"
626: #include "include/finclude/petscsles.h"
627: #include "include/finclude/petscsnes.h"
629: #include "include/finclude/petscvec.h90"
631: ! Input/output variables:
632: SNES snes
633: Vec X,localX
634: Mat jac,jac_prec
635: MatStructure flag
636: type(userctx) user
637: integer ierr
639: ! Declarations for use with local arrays:
640: Scalar,pointer :: lx_v(:)
642: ! Scatter ghost points to local vector, using the 2-step process
643: ! DAGlobalToLocalBegin(), DAGlobalToLocalEnd()
644: ! Computations can be done while messages are in transition,
645: ! by placing code between these two statements.
647: call DAGetLocalVector(user%da,localX,ierr)
648: call DAGlobalToLocalBegin(user%da,X,INSERT_VALUES,localX, &
649: & ierr)
650: call DAGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr)
652: ! Get a pointer to vector data
654: call VecGetArrayF90(localX,lx_v,ierr)
656: ! Compute entries for the locally owned part of the Jacobian.
658: call ApplicationJacobian(lx_v,jac,jac_prec,user,ierr)
660: ! Assemble matrix, using the 2-step process:
661: ! MatAssemblyBegin(), MatAssemblyEnd()
662: ! Computations can be done while messages are in transition,
663: ! by placing code between these two statements.
665: call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
666: call VecRestoreArrayF90(localX,lx_v,ierr)
667: call DARestoreLocalVector(user%da,localX,ierr)
668: call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
670: ! Set flag to indicate that the Jacobian matrix retains an identical
671: ! nonzero structure throughout all nonlinear iterations (although the
672: ! values of the entries change). Thus, we can save some work in setting
673: ! up the preconditioner (e.g., no need to redo symbolic factorization for
674: ! ILU/ICC preconditioners).
675: ! - If the nonzero structure of the matrix is different during
676: ! successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
677: ! must be used instead. If you are unsure whether the matrix
678: ! structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
679: ! - Caution: If you specify SAME_NONZERO_PATTERN, PETSc
680: ! believes your assertion and does not check the structure
681: ! of the matrix. If you erroneously claim that the structure
682: ! is the same when it actually is not, the new preconditioner
683: ! will not function correctly. Thus, use this optimization
684: ! feature with caution!
686: flag = SAME_NONZERO_PATTERN
688: ! Tell the matrix that we will never add a new nonzero location to it;
689: ! if we do, PETSc will generate an error.
691: call MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,ierr)
693: return
694: end
696: ! ---------------------------------------------------------------------
697: !
698: ! ApplicationJacobian - Computes Jacobian matrix, called by
699: ! the higher level routine FormJacobian().
700: !
701: ! Input Parameters:
702: ! x - local vector data
703: !
704: ! Output Parameters:
705: ! jac - Jacobian matrix
706: ! jac_prec - optionally different preconditioning matrix (not used here)
707: ! ierr - error code
708: !
709: ! Notes:
710: ! This routine uses standard Fortran-style computations over a 2-dim array.
711: !
712: ! Notes:
713: ! Due to grid point reordering with DAs, we must always work
714: ! with the local grid points, and then transform them to the new
715: ! global numbering with the "ltog" mapping (via DAGetGlobalIndicesF90()).
716: ! We cannot work directly with the global numbers for the original
717: ! uniprocessor grid!
718: !
719: ! Two methods are available for imposing this transformation
720: ! when setting matrix entries:
721: ! (A) MatSetValuesLocal(), using the local ordering (including
722: ! ghost points!)
723: ! - Use DAGetGlobalIndicesF90() to extract the local-to-global map
724: ! - Associate this map with the matrix by calling
725: ! MatSetLocalToGlobalMapping() once
726: ! - Set matrix entries using the local ordering
727: ! by calling MatSetValuesLocal()
728: ! (B) MatSetValues(), using the global ordering
729: ! - Use DAGetGlobalIndicesF90() to extract the local-to-global map
730: ! - Then apply this map explicitly yourself
731: ! - Set matrix entries using the global ordering by calling
732: ! MatSetValues()
733: ! Option (A) seems cleaner/easier in many cases, and is the procedure
734: ! used in this example.
735: !
736: subroutine ApplicationJacobian(x,jac,jac_prec,user,ierr)
737: use f90module
738: implicit none
740: #include "include/finclude/petsc.h"
741: #include "include/finclude/petscvec.h"
742: #include "include/finclude/petscda.h"
743: #include "include/finclude/petscis.h"
744: #include "include/finclude/petscmat.h"
745: #include "include/finclude/petscksp.h"
746: #include "include/finclude/petscpc.h"
747: #include "include/finclude/petscsles.h"
748: #include "include/finclude/petscsnes.h"
750: ! Input/output variables:
751: type (userctx) user
752: Scalar x(user%gxs:user%gxe,user%gys:user%gye)
753: Mat jac,jac_prec
754: integer ierr
756: ! Local variables:
757: integer row,col(5),i,j
758: Scalar two,one,hx,hy,hxdhy,hydhx,sc,v(5)
760: ! Set parameters
762: one = 1.0
763: two = 2.0
764: hx = one/dble(user%mx-1)
765: hy = one/dble(user%my-1)
766: sc = hx*hy
767: hxdhy = hx/hy
768: hydhx = hy/hx
770: ! Compute entries for the locally owned part of the Jacobian.
771: ! - Currently, all PETSc parallel matrix formats are partitioned by
772: ! contiguous chunks of rows across the processors.
773: ! - Each processor needs to insert only elements that it owns
774: ! locally (but any non-local elements will be sent to the
775: ! appropriate processor during matrix assembly).
776: ! - Here, we set all entries for a particular row at once.
777: ! - We can set matrix entries using either
778: ! MatSetValuesLocal() or MatSetValues(), as discussed above.
779: ! - Note that MatSetValues() uses 0-based row and column numbers
780: ! in Fortran as well as in C.
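!  (In the loop below, "row" is the 0-based index of grid point (i,j) in
!   the ghosted local ordering; MatSetValuesLocal() translates it to the
!   global ordering via the mapping set in the main program.)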
782: do 20 j=user%ys,user%ye
783: row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
784: do 10 i=user%xs,user%xe
785: row = row + 1
786: ! boundary points
787: if (i .eq. 1 .or. j .eq. 1 &
788: & .or. i .eq. user%mx .or. j .eq. user%my) then
789: call MatSetValuesLocal(jac,1,row,1,row,one, &
790: & INSERT_VALUES,ierr)
791: ! interior grid points
792: else
793: v(1) = -hxdhy
794: v(2) = -hydhx
795: v(3) = two*(hydhx + hxdhy) &
796: & - sc*user%lambda*exp(x(i,j))
797: v(4) = -hydhx
798: v(5) = -hxdhy
799: col(1) = row - user%gxm
800: col(2) = row - 1
801: col(3) = row
802: col(4) = row + 1
803: col(5) = row + user%gxm
804: call MatSetValuesLocal(jac,1,row,5,col,v, &
805: & INSERT_VALUES,ierr)
806: endif
807: 10 continue
808: 20 continue
810: return
811: end