Actual source code: ex6f.F

!
!    "$Id: ex6f.F,v 1.34 2001/01/19 23:21:44 balay Exp $";
!
!  Description: This example demonstrates repeated linear solves as
!  well as the use of different preconditioner and linear system
!  matrices.  This example also illustrates how to save PETSc objects
!  in common blocks.
!
!/*T
!  Concepts: SLES^repeatedly solving linear systems;
!  Concepts: SLES^different matrices for linear system and preconditioner;
!  Processors: n
!T*/
!
!  The following include statements are required for SLES Fortran programs:
!     petsc.h       - base PETSc routines
!     petscvec.h    - vectors
!     petscmat.h    - matrices
!     petscpc.h     - preconditioners
!     petscksp.h    - Krylov subspace methods
!     petscsles.h   - SLES interface
!  Other include statements may be needed if using additional PETSc
!  routines in a Fortran program, e.g.,
!     petscviewer.h - viewers
!     petscis.h     - index sets
!
      program main
#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscsles.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"

!  Variables:
!
!  A       - matrix that defines linear system
!  sles    - SLES context
!  ksp     - KSP context
!  x, b, u - approx solution, RHS, exact solution vectors
!
      Vec     x,u,b
      Mat     A
      SLES    sles
      integer i,j,II,JJ,ierr,m,n
      integer Istart,Iend,flg,nsteps
      Scalar  v

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      m      = 3
      n      = 3
      nsteps = 2
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-nsteps',nsteps,    &
     &     flg,ierr)
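!  For example (assuming a standard PETSc installation and an MPI
!  launcher such as mpirun), one might run this example as:
!     mpirun -np 2 ex6f -m 4 -n 4 -nsteps 3 \
!            -ksp_type gmres -pc_type jacobi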

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,    &
     &               m*n,A,ierr)
      call MatSetFromOptions(A,ierr)

!  The matrix is partitioned by contiguous chunks of rows across the
!  processors.  Determine which rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
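!   - The loop below fills in the standard 5-point stencil on an
!     m x n grid: each row II is coupled to its four grid neighbors
!     (II-n, II+n, II-1, II+1) with value -1 and to itself with
!     value 4.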

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.
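!  (Entries that were set with ADD_VALUES for rows owned by other
!  processes are communicated to their owners during this phase.)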

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - When using VecCreate(), the parallel partitioning of the vector
!     is determined by PETSc at runtime.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Create linear solver context

      call SLESCreate(PETSC_COMM_WORLD,sles,ierr)

!  Set runtime options (e.g., -ksp_type <type> -pc_type <type>)

      call SLESSetFromOptions(sles,ierr)

!  Solve several linear systems in succession

      do 100 i=1,nsteps
         call solve1(sles,A,x,b,u,i,nsteps,ierr)
 100  continue

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
      call SLESDestroy(sles,ierr)

      call PetscFinalize(ierr)
      end

! -----------------------------------------------------------------------
!
      subroutine solve1(sles,A,x,b,u,count,nsteps,ierr)

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscsles.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"

!
!   solve1 - This routine is used for repeated linear system solves.
!   We update the linear system matrix each time, but retain the same
!   preconditioning matrix for all linear solves.
!
!      A  - linear system matrix
!      A2 - preconditioning matrix
!
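!   Each call adds 2.0 to every diagonal entry of A, so the linear
!   system changes from step to step, while A2 retains a copy of the
!   original matrix from which the preconditioner is built.
!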
      Scalar  v,val
      integer II,ierr,Istart,Iend,count,nsteps,its
      Mat     A
      SLES    sles
      KSP     ksp
      Vec     x,b,u

! Use common block to retain matrix between successive subroutine calls
      Mat              A2
      integer          rank,pflag
      common /my_data/ A2,pflag,rank
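! A2 is created on the first call and destroyed on the final call,
! so it persists between calls only through this common block.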

! First time through: Create an additional matrix (a copy of A) to
! serve as the preconditioning matrix.
      if (count .eq. 1) then
        call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
        pflag = 0
        call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-mat_view',      &
     &       pflag,ierr)
        if (pflag .ne. 0) then
          if (rank .eq. 0) write(6,100)
        endif
        call MatConvert(A,MATSAME,A2,ierr)
! All other times: Set previous solution as initial guess for next solve.
      else
        call SLESGetKSP(sles,ksp,ierr)
        call KSPSetInitialGuessNonzero(ksp,ierr)
      endif

! Alter the linear system matrix A by adding 2.0 to each diagonal entry
      call MatGetOwnershipRange(A,Istart,Iend,ierr)
      do 20, II=Istart,Iend-1
        v = 2.0
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 20   continue
      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      if (pflag .ne. 0) then
        if (rank .eq. 0) write(6,110)
      endif
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

! Set the exact solution; compute the right-hand-side vector
      val = 1.0*count
      call VecSet(val,u,ierr)
      call MatMult(A,u,b,ierr)

! Set operators, keeping the identical preconditioner matrix for
! all linear solves.  This approach is often effective when the
! linear systems do not change very much between successive steps.
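! (SAME_PRECONDITIONER tells PETSc to reuse the preconditioner built
! from A2; a flag such as SAME_NONZERO_PATTERN or
! DIFFERENT_NONZERO_PATTERN would instead cause it to be recomputed.)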
      call SLESSetOperators(sles,A,A2,SAME_PRECONDITIONER,ierr)

! Solve linear system
      call SLESSolve(sles,b,x,its,ierr)

! Destroy the preconditioner matrix the last time through
      if (count .eq. nsteps) call MatDestroy(A2,ierr)

 100  format('previous matrix: preconditioning')
 110  format('next matrix: defines linear system')

      end