Actual source code: ex15f.F

!
!   Solves a linear system in parallel with KSP.  Also indicates
!   use of a user-provided preconditioner.  Input parameters include:
!      -user_defined_pc : Activate a user-defined preconditioner
!
!  Program usage: mpiexec ex15f [-help] [all PETSc options]
!
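!  For example, an illustrative run on two processes (assuming the
!  executable is named ex15f) is:
!     mpiexec -n 2 ./ex15f -m 16 -n 16 -user_defined_pc -ksp_monitor
!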
!/*T
!   Concepts: KSP^basic parallel example
!   Concepts: PC^setting a user-defined shell preconditioner
!   Processors: n
!T*/
!
!  -------------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!     petsc.h       - base PETSc routines           petscvec.h - vectors
!     petscsys.h    - system routines               petscmat.h - matrices
!     petscksp.h    - Krylov subspace methods       petscpc.h  - preconditioners

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - Krylov subspace method (linear solver) context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of solution error

      Vec              x,b,u
      Mat              A
      PC               pc
      KSP              ksp
      PetscScalar      v,one,neg_one
      double precision norm,tol
      PetscErrorCode   ierr
      PetscInt         i,j,II,JJ,Istart
      PetscInt         Iend,m,n,i1,its
      PetscMPIInt      rank
      PetscTruth       user_defined_pc,flg

!  Note: Any user-defined Fortran routines MUST be declared as external.

      external SampleShellPCSetUp, SampleShellPCApply

!  Common block to store data for user-provided preconditioner
      common /myshellpc/ diag
      Vec    diag
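!  (The common block is simply a convenient way to share the work
!  vector with the shell preconditioner routines; PETSc also provides
!  PCShellSetContext()/PCShellGetContext() for attaching a user-defined
!  context to the PC instead.)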

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      one     = 1.0
      neg_one = -1.0
      i1      = 1
      m       = 8
      n       = 7
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetType(A, MATAIJ,ierr)
      call MatSetFromOptions(A,ierr)
      call MatMPIAIJSetPreallocation(A,5,PETSC_NULL_INTEGER,5,
     &                     PETSC_NULL_INTEGER,ierr)
      call MatSeqAIJSetPreallocation(A,5,PETSC_NULL_INTEGER,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
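!   - For this m x n grid, global row II corresponds to grid point
!     (i,j) with i = II/n and j = II - i*n; each neighbor that lies
!     on the grid contributes -1.0 and the diagonal entry is 4.0.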

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,i1,II,i1,II,v,ADD_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit by placing
!  code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
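!  (MAT_FINAL_ASSEMBLY is used because no further values will be
!  inserted; MAT_FLUSH_ASSEMBLY would be used instead between batches
!  of insertions that switch between ADD_VALUES and INSERT_VALUES.)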

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,one,ierr)
      call MatMult(A,u,b,ierr)
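!  Since every entry of u is set to 1.0 and b = A*u, the vector u is
!  the exact solution of Ax = b; the error check at the end of the
!  program relies on this.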

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN,ierr)

!  Set linear solver defaults for this problem (optional).
!   - By extracting the PC context from the KSP context, we can then
!     directly call any KSP and PC routines to set various options.

      call KSPGetPC(ksp,pc,ierr)
      tol = 1.e-7
      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_DOUBLE_PRECISION,     &
     &     PETSC_DEFAULT_DOUBLE_PRECISION,PETSC_DEFAULT_INTEGER,ierr)
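!  (The arguments to KSPSetTolerances() are, in order, the relative
!  tolerance, absolute tolerance, divergence tolerance, and maximum
!  number of iterations; here only the relative tolerance is changed
!  and PETSc defaults are requested for the rest.)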

!
!  Set a user-defined shell preconditioner if desired
!
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-user_defined_pc',      &
     &                    user_defined_pc,ierr)

      if (user_defined_pc) then

!  (Required) Indicate to PETSc that we are using a shell preconditioner
         call PCSetType(pc,PCSHELL,ierr)

!  (Required) Set the user-defined routine for applying the preconditioner
         call PCShellSetApply(pc,SampleShellPCApply,ierr)

!  (Optional) Do any setup required for the preconditioner
         call SampleShellPCSetUp(A,x,ierr)
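!  (Here the setup routine is simply called directly.  PETSc also
!  provides PCShellSetSetUp() for registering a setup routine that is
!  invoked automatically during PCSetUp().)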

      else
         call PCSetType(pc,PCJACOBI,ierr)
      endif

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

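!  KSPSolve() takes the right-hand side b as input and returns the
!  computed solution in x.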
      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

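!  VecAXPY() computes x <- x + alpha*u; with alpha = -1 the vector x
!  then holds the difference between the computed and exact solutions,
!  and VecNorm() returns its 2-norm.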
      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)

      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
           write(6,100) norm,its
        else
           write(6,110) its
        endif
      endif
  100 format('Norm of error ',1pe10.4,' iterations ',i5)
  110 format('Norm of error < 1.e-12, iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
      if (user_defined_pc) then
         call VecDestroy(diag,ierr)
      endif


!  Always call PetscFinalize() before exiting a program.

      call PetscFinalize(ierr)
      end

!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCSetUp - This routine sets up a user-defined
!   preconditioner context.
!
!   Input Parameters:
!   pmat  - preconditioner matrix
!   x     - vector
!
!   Output Parameter:
!   ierr  - error code (nonzero if error has been detected)
!
!   Notes:
!   In this example, we define the shell preconditioner to be the Jacobi
!   method.  Thus, here we create a work vector for storing the reciprocal
!   of the diagonal of the preconditioner matrix; this vector is then
!   used within the routine SampleShellPCApply().
!
      subroutine SampleShellPCSetUp(pmat,x,ierr)

      implicit none

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"

      Vec            x
      Mat            pmat
      PetscErrorCode ierr

!  Common block to store data for user-provided preconditioner
      common /myshellpc/ diag
      Vec    diag

      call VecDuplicate(x,diag,ierr)
      call MatGetDiagonal(pmat,diag,ierr)
      call VecReciprocal(diag,ierr)

      end

! -------------------------------------------------------------------
!
!   SampleShellPCApply - This routine demonstrates the use of a
!   user-provided preconditioner.
!
!   Input Parameters:
!   dummy - optional user-defined context, not used here
!   x     - input vector
!
!   Output Parameters:
!   y     - preconditioned vector
!   ierr  - error code (nonzero if error has been detected)
!
!   Notes:
!   This code implements the Jacobi preconditioner, merely as an
!   example of working with a PCSHELL.  Note that the Jacobi method
!   is already provided within PETSc.
!
      subroutine SampleShellPCApply(dummy,x,y,ierr)

      implicit none

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"

      Vec            x,y
      integer        dummy
      PetscErrorCode ierr

!  Common block to store data for user-provided preconditioner
      common /myshellpc/ diag
      Vec    diag

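!  Apply the Jacobi preconditioner: diag already holds the reciprocal
!  of the matrix diagonal, so y(i) = diag(i)*x(i) computes y = inv(D)*x.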
      call VecPointwiseMult(y,x,diag,ierr)

      end