Actual source code: ex14f.F

!
!
!  Solves a nonlinear system in parallel with a user-defined
!  Newton method that uses KSP to solve the linearized Newton systems.  This solver
!  is a very simplistic inexact Newton method.  The intent of this code is to
!  demonstrate the repeated solution of linear systems with the same nonzero pattern.
!
!  This is NOT the recommended approach for solving nonlinear problems with PETSc!
!  We urge users to employ the SNES component for solving nonlinear problems whenever
!  possible, as it offers many advantages over coding nonlinear solvers independently.
!
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DAs) to partition the parallel grid.
!
!  The command line options include:
!  -par <parameter>, where <parameter> indicates the problem's nonlinearity
!     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
!  -mx <xg>, where <xg> = number of grid points in the x-direction
!  -my <yg>, where <yg> = number of grid points in the y-direction
!  -Nx <npx>, where <npx> = number of processors in the x-direction
!  -Ny <npy>, where <npy> = number of processors in the y-direction
!  -mf, use a matrix-free approach for the matrix-vector product
!
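!  An illustrative run command (the MPI launcher and executable name
!  depend on the local installation) might be:
!     mpirun -np 4 ex14f -mx 16 -my 16 -Nx 2 -Ny 2 -ksp_monitor
!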
!/*T
!   Concepts: KSP^writing a user-defined nonlinear solver
!   Concepts: DA^using distributed arrays
!   Processors: n
!T*/
!  ------------------------------------------------------------------------
!
!    Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!    the partial differential equation
!
!            -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
!
!    with boundary conditions
!
!             u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!    A finite difference approximation with the usual 5-point stencil
!    is used to discretize the boundary value problem to obtain a nonlinear
!    system of equations.
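!
!    With the hx*hy scaling used in this file, the discrete equation at
!    an interior node (i,j) reads (see ComputeFunction below):
!
!       (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
!     + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
!     - hx*hy*lambda*exp(u(i,j)) = 0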
!
!    The SNES version of this problem is:  snes/examples/tutorials/ex5f.F
!
!  -------------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!     petsc.h       - base PETSc routines   petscvec.h - vectors
!     petscsys.h    - system routines       petscmat.h - matrices
!     petscis.h     - index sets            petscksp.h - Krylov subspace methods
!     petscviewer.h - viewers               petscpc.h  - preconditioners

#include "include/finclude/petsc.h"
#include "include/finclude/petscis.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"
#include "include/finclude/petscda.h"

      MPI_Comm comm
      Vec      X,Y,F,localX,localF
      Mat      J,B
      DA       da
      KSP      ksp

      PetscInt  Nx,Ny,N,mx,my,ifive,ithree
      PetscTruth flg,nooutput,usemf
      PetscMPIInt rank
      common   /mycommon/ mx,my,B,localX,localF,da
!
!
!      This is the routine to use for the matrix-free approach
!
      external mymult

!     --------------- Data to define the nonlinear solver --------------
      double precision   rtol,xtol,ttol
      double precision   fnorm,ynorm,xnorm
      PetscInt           max_nonlin_its,one
      PetscInt           lin_its
      PetscInt           i,m
      PetscScalar        mone
      PetscErrorCode     ierr

      mone           = -1.d0
      rtol           = 1.d-8
      xtol           = 1.d-8
      max_nonlin_its = 10
      one            = 1
      ifive          = 5
      ithree         = 3

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      comm = PETSC_COMM_WORLD

!  Initialize problem parameters

!
      mx = 4
      my = 4
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-mx',mx,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-my',my,flg,ierr)
      N = mx*my

      nooutput = 0
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-no_output',       &
     &     nooutput,ierr)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Create linear solver context
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPCreate(comm,ksp,ierr)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Create vector data structures
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!
!  Create distributed array (DA) to manage parallel grid and vectors
!
      Nx = PETSC_DECIDE
      Ny = PETSC_DECIDE
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-Nx',Nx,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-Ny',Ny,flg,ierr)
      call DACreate2d(comm,DA_NONPERIODIC,DA_STENCIL_STAR,mx,           &
     &     my,Nx,Ny,one,one,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,      &
     &     da,ierr)

!
!  Extract global and local vectors from the DA, then duplicate for the
!  remaining vectors of the same types
!
      call DACreateGlobalVector(da,X,ierr)
      call DACreateLocalVector(da,localX,ierr)
      call VecDuplicate(X,F,ierr)
      call VecDuplicate(X,Y,ierr)
      call VecDuplicate(localX,localF,ierr)


!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Create matrix data structure for the Jacobian
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!     Note:  For the parallel case, vectors and matrices MUST be partitioned
!     accordingly.  When using distributed arrays (DAs) to create vectors,
!     the DAs determine the problem partitioning.  We must explicitly
!     specify the local matrix dimensions upon its creation for compatibility
!     with the vector distribution.  Thus, the generic MatCreate() routine
!     is NOT sufficient when working with distributed arrays.
!
!     Note: Here we only approximately preallocate storage space for the
!     Jacobian.  See the users manual for a discussion of better techniques
!     for preallocating matrix memory.
!
      call VecGetLocalSize(X,m,ierr)
      call MatCreateMPIAIJ(comm,m,m,N,N,ifive,PETSC_NULL_INTEGER,       &
     &     ithree,PETSC_NULL_INTEGER,B,ierr)
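!     Here ifive (=5) and ithree (=3) are rough per-row estimates of
!     the nonzeros in the diagonal and off-diagonal blocks of the
!     parallel AIJ matrix: 5 matches the 5-point stencil, and a few
!     entries suffice for couplings to neighboring processes.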

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     If usemf is on, then the matrix-vector product is done via the
!     matrix-free approach.  Note this is just an example, and not
!     realistic, because we still use the actual formed matrix; in
!     reality one would provide one's own subroutine that computes the
!     matrix-vector product directly and does not call MatMult().
!     Note: we put B into a common block so it will be visible to the
!     mymult() routine
      usemf = 0
      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-mf',usemf,ierr)
      if (usemf .eq. 1) then
         call MatCreateShell(comm,m,m,N,N,PETSC_NULL_INTEGER,J,ierr)
         call MatShellSetOperation(J,MATOP_MULT,mymult,ierr)
      else
!        If not doing matrix-free, then the matrix operator, J, and the
!        matrix used to construct the preconditioner, B, are the same
        J = B
      endif
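!     With the shell matrix, every MatMult(J,...) that KSP performs is
!     dispatched to mymult() through the MATOP_MULT binding above.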

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Customize the linear solver; set runtime options
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!     Set runtime options (e.g., -ksp_monitor -ksp_rtol <rtol> -ksp_type <type>)
!
      call KSPSetFromOptions(ksp,ierr)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Evaluate initial guess
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call FormInitialGuess(X,ierr)
      call ComputeFunction(X,F,ierr)
      call VecNorm(F,NORM_2,fnorm,ierr)
      ttol = fnorm*rtol
      if (nooutput .eq. 0) then
        print*, 'Initial function norm ',fnorm
      endif

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Solve nonlinear system with a user-defined method
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  This solver is a very simplistic inexact Newton method, with no
!  damping strategies or bells and whistles. The intent of this code
!  is merely to demonstrate the repeated solution with KSP of linear
!  systems with the same nonzero structure.
!
!  This is NOT the recommended approach for solving nonlinear problems
!  with PETSc!  We urge users to employ the SNES component for solving
!  nonlinear problems whenever possible with application codes, as it
!  offers many advantages over coding nonlinear solvers independently.
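!
!  Concretely, each pass of the loop below does one Newton step: solve
!  J(x) y = F(x) for the correction y, update x <- x - y, and stop once
!  ||F(x)|| <= ttol = rtol*||F(x_0)||.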

      do 10 i=0,max_nonlin_its

!  Compute the Jacobian matrix.  See the comments in ComputeJacobian()
!  below for important information on working with the local grid
!  numbering.

         call ComputeJacobian(X,B,ierr)

!  Solve J Y = F, where J is the Jacobian matrix.
!    - First, set the KSP linear operators.  Here the matrix that
!      defines the linear system also serves as the preconditioning
!      matrix (with -mf, the shell matrix J is preconditioned by B).
!    - Then solve the Newton system.
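!    - Passing SAME_NONZERO_PATTERN tells KSP that the nonzero
!      structure of the matrix is unchanged between solves, so the
!      preconditioner can reuse its symbolic information and only
!      refresh the numerical values.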

         call KSPSetOperators(ksp,J,B,SAME_NONZERO_PATTERN,ierr)
         call KSPSolve(ksp,F,Y,ierr)

!  Compute updated iterate

         call VecNorm(Y,NORM_2,ynorm,ierr)
         call VecAYPX(mone,X,Y,ierr)
         call VecCopy(Y,X,ierr)
         call VecNorm(X,NORM_2,xnorm,ierr)
         call KSPGetIterationNumber(ksp,lin_its,ierr)
         if (nooutput .eq. 0) then
           print*,'linear solve iterations = ',lin_its,' xnorm = ',     &
     &         xnorm,' ynorm = ',ynorm
         endif

!  Evaluate nonlinear function at new location

         call ComputeFunction(X,F,ierr)
         call VecNorm(F,NORM_2,fnorm,ierr)
         if (nooutput .eq. 0) then
           print*, 'Iteration ',i+1,' function norm ',fnorm
         endif

!  Test for convergence

       if (fnorm .le. ttol) then
         if (nooutput .eq. 0) then
           print*,'Converged: function norm ',fnorm,' tolerance ',ttol
         endif
         goto 20
       endif
 10   continue
 20   continue

      write(6,100) i+1
 100  format('Number of Newton iterations =',I2)

!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!     Free work space.  All PETSc objects should be destroyed when they
!     are no longer needed.
!  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call MatDestroy(B,ierr)
      if (usemf .ne. 0) then
        call MatDestroy(J,ierr)
      endif
      call VecDestroy(localX,ierr)
      call VecDestroy(X,ierr)
      call VecDestroy(Y,ierr)
      call VecDestroy(localF,ierr)
      call VecDestroy(F,ierr)
      call KSPDestroy(ksp,ierr)
      call DADestroy(da,ierr)
      call PetscFinalize(ierr)
      end

! -------------------------------------------------------------------
!
!   FormInitialGuess - Forms the initial approximation.
!
!   Input Parameter:
!   X - vector
!
!   Output Parameter:
!   X - vector, filled with the initial guess
!
      subroutine FormInitialGuess(X,ierr)
      implicit none

!     petsc.h       - base PETSc routines   petscvec.h - vectors
!     petscsys.h    - system routines       petscmat.h - matrices
!     petscis.h     - index sets            petscksp.h - Krylov subspace methods
!     petscviewer.h - viewers               petscpc.h  - preconditioners

#include "include/finclude/petsc.h"
#include "include/finclude/petscis.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"
#include "include/finclude/petscda.h"
      PetscErrorCode   ierr
      PetscOffset      idx
      Vec              X,localX,localF
      PetscInt         i,j,row,mx,my,xs,ys,xm
      PetscInt         ym,gxm,gym,gxs,gys
      double precision one,lambda,temp1,temp,hx,hy
      double precision hxdhy,hydhx,sc
      PetscScalar      xx(1)
      DA               da
      Mat              B
      common   /mycommon/ mx,my,B,localX,localF,da

      one    = 1.d0
      lambda = 6.d0
      hx     = one/(mx-1)
      hy     = one/(my-1)
      sc     = hx*hy*lambda
      hxdhy  = hx/hy
      hydhx  = hy/hx
      temp1  = lambda/(lambda + one)

!  Get a pointer to vector data.
!    - VecGetArray() returns a pointer to the data array.
!    - You MUST call VecRestoreArray() when you no longer need access to
!      the array.
      call VecGetArray(localX,xx,idx,ierr)

!  Get local grid boundaries (for 2-dimensional DA):
!    xs, ys   - starting grid indices (no ghost points)
!    xm, ym   - widths of local grid (no ghost points)
!    gxs, gys - starting grid indices (including ghost points)
!    gxm, gym - widths of local grid (including ghost points)

      call DAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym,              &
     &      PETSC_NULL_INTEGER,ierr)
      call DAGetGhostCorners(da,gxs,gys,PETSC_NULL_INTEGER,gxm,gym,     &
     &      PETSC_NULL_INTEGER,ierr)

!  Compute initial guess over the locally owned part of the grid
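!  (For grid point (i,j), the 1-based index into the ghosted local
!  array is row = (i-gxs) + (j-gys)*gxm + 1: offsets are taken from the
!  ghost corner (gxs,gys), with rows of width gxm.)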

      do 30 j=ys,ys+ym-1
        temp = (min(j,my-j-1))*hy
        do 40 i=xs,xs+xm-1
          row = i - gxs + (j - gys)*gxm + 1
          if (i .eq. 0 .or. j .eq. 0 .or. i .eq. mx-1 .or.              &
     &        j .eq. my-1) then
            xx(idx+row) = 0.d0
            goto 40
          endif
          xx(idx+row) = temp1*sqrt(min((min(i,mx-i-1))*hx,temp))
 40     continue
 30   continue

!     Restore vector

      call VecRestoreArray(localX,xx,idx,ierr)

!     Insert values into global vector

      call DALocalToGlobal(da,localX,INSERT_VALUES,X,ierr)
      return
      end

! -------------------------------------------------------------------
!
!   ComputeFunction - Evaluates the nonlinear function, F(x).
!
!   Input Parameter:
!.  X - input vector
!
!   Output Parameter:
!.  F - function vector
!
      subroutine ComputeFunction(X,F,ierr)
      implicit none

!     petsc.h       - base PETSc routines   petscvec.h - vectors
!     petscsys.h    - system routines       petscmat.h - matrices
!     petscis.h     - index sets            petscksp.h - Krylov subspace methods
!     petscviewer.h - viewers               petscpc.h  - preconditioners

#include "include/finclude/petsc.h"
#include "include/finclude/petscis.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"
#include "include/finclude/petscda.h"

      Vec              X,F,localX,localF
      PetscInt         gys,gxm,gym
      PetscOffset      idx,idf
      PetscErrorCode   ierr
      PetscInt         i,j,row,mx,my,xs,ys,xm,ym,gxs
      double precision two,one,lambda,hx
      double precision hy,hxdhy,hydhx,sc
      PetscScalar      u,uxx,uyy,xx(1),ff(1)
      DA               da
      Mat              B
      common   /mycommon/ mx,my,B,localX,localF,da

      two    = 2.d0
      one    = 1.d0
      lambda = 6.d0

      hx     = one/(mx-1)
      hy     = one/(my-1)
      sc     = hx*hy*lambda
      hxdhy  = hx/hy
      hydhx  = hy/hx

!  Scatter ghost points to local vector, using the 2-step process
!     DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.
!
      call DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr)
      call DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)

!  Get pointers to vector data

      call VecGetArray(localX,xx,idx,ierr)
      call VecGetArray(localF,ff,idf,ierr)

!  Get local grid boundaries

      call DAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym,              &
     &     PETSC_NULL_INTEGER,ierr)
      call DAGetGhostCorners(da,gxs,gys,PETSC_NULL_INTEGER,gxm,gym,     &
     &     PETSC_NULL_INTEGER,ierr)

!  Compute function over the locally owned part of the grid

      do 50 j=ys,ys+ym-1

        row = (j - gys)*gxm + xs - gxs
        do 60 i=xs,xs+xm-1
          row = row + 1

          if (i .eq. 0 .or. j .eq. 0 .or. i .eq. mx-1 .or.              &
     &        j .eq. my-1) then
            ff(idf+row) = xx(idx+row)
            goto 60
          endif
          u   = xx(idx+row)
          uxx = (two*u - xx(idx+row-1) - xx(idx+row+1))*hydhx
          uyy = (two*u - xx(idx+row-gxm) - xx(idx+row+gxm))*hxdhy
          ff(idf+row) = uxx + uyy - sc*exp(u)
 60     continue
 50   continue

!  Restore vectors

      call VecRestoreArray(localX,xx,idx,ierr)
      call VecRestoreArray(localF,ff,idf,ierr)

!  Insert values into global vector

      call DALocalToGlobal(da,localF,INSERT_VALUES,F,ierr)
      return
      end

! -------------------------------------------------------------------
!
!   ComputeJacobian - Evaluates the Jacobian matrix.
!
!   Input Parameter:
!   X - input vector
!
!   Output Parameter:
!   jac - Jacobian matrix
!
!   Notes:
!   Due to grid point reordering with DAs, we must always work
!   with the local grid points, and then transform them to the new
!   global numbering with the 'ltog' mapping (via DAGetGlobalIndices()).
!   We cannot work directly with the global numbers for the original
!   uniprocessor grid!
!
      subroutine ComputeJacobian(X,jac,ierr)
      implicit none

!     petsc.h       - base PETSc routines   petscvec.h - vectors
!     petscsys.h    - system routines       petscmat.h - matrices
!     petscis.h     - index sets            petscksp.h - Krylov subspace methods
!     petscviewer.h - viewers               petscpc.h  - preconditioners

#include "include/finclude/petsc.h"
#include "include/finclude/petscis.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"
#include "include/finclude/petscda.h"

      Vec              X
      Mat              jac
      Vec              localX,localF
      DA               da
      PetscInt         ltog(1)
      PetscOffset      idltog,idx
      PetscErrorCode   ierr
      PetscInt         nloc,xs,ys,xm,ym,gxs,gys,gxm,gym,grow,i,j,row
      PetscInt         mx,my,ione,col(5),ifive
      PetscScalar      two,one,lambda,v(5),hx,hy,hxdhy
      PetscScalar      hydhx,sc,xx(1)
      Mat              B
      common   /mycommon/ mx,my,B,localX,localF,da

      ione   = 1
      ifive  = 5
      one    = 1.d0
      two    = 2.d0
      hx     = one/(mx-1)
      hy     = one/(my-1)
      sc     = hx*hy
      hxdhy  = hx/hy
      hydhx  = hy/hx
      lambda = 6.d0

!  Scatter ghost points to local vector, using the 2-step process
!     DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.

      call DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr)
      call DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)

!  Get pointer to vector data

      call VecGetArray(localX,xx,idx,ierr)

!  Get local grid boundaries

      call DAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym,              &
     &     PETSC_NULL_INTEGER,ierr)
      call DAGetGhostCorners(da,gxs,gys,PETSC_NULL_INTEGER,gxm,gym,     &
     &     PETSC_NULL_INTEGER,ierr)

!  Get the global node numbers for all local nodes, including ghost points

      call DAGetGlobalIndices(da,nloc,ltog,idltog,ierr)

!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors. The 'grow'
!     parameter computed below specifies the global row number
!     corresponding to each local grid point.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Here, we set all entries for a particular row at once; the
!     stencil values are spelled out below.
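!
!  For an interior grid point the Jacobian row holds five entries,
!  the derivatives of the residual in ComputeFunction():
!     diagonal:       2*(hy/hx + hx/hy) - hx*hy*lambda*exp(u(i,j))
!     east/west:     -hy/hx
!     north/south:   -hx/hy
!  Boundary rows are simply set to the identity.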

      do 10 j=ys,ys+ym-1
        row = (j - gys)*gxm + xs - gxs
        do 20 i=xs,xs+xm-1
          row = row + 1
          grow = ltog(idltog+row)
          if (i .eq. 0 .or. j .eq. 0 .or. i .eq. (mx-1) .or.            &
     &        j .eq. (my-1)) then
             call MatSetValues(jac,ione,grow,ione,grow,one,             &
     &                         INSERT_VALUES,ierr)
             go to 20
          endif
          v(1)   = -hxdhy
          col(1) = ltog(idltog+row - gxm)
          v(2)   = -hydhx
          col(2) = ltog(idltog+row - 1)
          v(3)   = two*(hydhx + hxdhy) - sc*lambda*exp(xx(idx+row))
          col(3) = grow
          v(4)   = -hydhx
          col(4) = ltog(idltog+row + 1)
          v(5)   = -hxdhy
          col(5) = ltog(idltog+row + gxm)
          call MatSetValues(jac,ione,grow,ifive,col,v,INSERT_VALUES,    &
     &                      ierr)
 20     continue
 10   continue

!  Assemble matrix, using the 2-step process:
!    MatAssemblyBegin(), MatAssemblyEnd().
!  By placing code between these two statements, computations can be
!  done while messages are in transition.

      call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
      call VecRestoreArray(localX,xx,idx,ierr)
      call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
      return
      end


! -------------------------------------------------------------------
!
!   MyMult - user-provided matrix-vector multiply routine
!
!   Input Parameters:
!.  J - shell matrix
!.  X - input vector
!
!   Output Parameter:
!.  F - result of the matrix-vector product J*X
!
      subroutine MyMult(J,X,F,ierr)
      implicit none
      Mat     J,B
      Vec     X,F
      PetscErrorCode ierr
      PetscInt mx,my
      DA      da
      Vec     localX,localF

      common   /mycommon/ mx,my,B,localX,localF,da
!
!       Here we use the actual formed matrix B; users would
!     instead write their own matrix-vector product routine
!
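!       (A genuine matrix-free version would, for example, scatter X
!     into a ghosted local vector and apply the 5-point stencil of
!     ComputeJacobian directly, without ever forming B.)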
      call MatMult(B,X,F,ierr)
      return
      end