Actual source code: ex17.c

/*$Id: ex17.c,v 1.19 2001/03/23 23:23:55 balay Exp $*/

/* Usage:  mpirun ex17 [-help] [all PETSc options] */

static char help[] = "Solves a linear system in parallel with SLES.\n\
Input parameters include:\n\
  -view_exact_sol   : write exact solution vector to stdout\n\
  -m <rows>         : number of rows in the linear system\n\
  -n <columns>      : number of columns in the linear system\n\n";
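
/*
   Illustrative invocation (editorial sketch; the launcher name and
   option values are examples, not part of the original file):

       mpirun -np 2 ex17 -m 6 -n 4 -view_exact_sol -ksp_monitor
*/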

/*T
   Concepts: SLES^solving a linear system
   Processors: n
T*/

/*
  Include "petscsles.h" so that we can use SLES solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscsles.h"

int main(int argc,char **args)
{
  Vec         x,b,u;    /* approx solution, RHS, exact solution */
  Mat         A;        /* linear system matrix */
  SLES        sles;     /* linear solver context */
  PetscRandom rctx;     /* random number generator context */
  double      norm;     /* norm of solution error */
  int         i,I,Istart,Iend,m = 5,n = 5,its,*cols;
  Scalar      neg_one = -1.0,*ua;
  PetscTruth  flg;

  PetscInitialize(&argc,&args,(char *)0,help);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
  PetscPrintf(PETSC_COMM_WORLD,"system size: m=%d, n=%d\n",m,n);
  if (m < n) SETERRQ(1,"Supports m >= n only!");

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create parallel vectors.
      - When using VecCreate() and VecSetFromOptions(), we specify only the vector's global
        dimension; the parallel partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly.  PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD,PETSC_DECIDE,n,&u);
  VecSetFromOptions(u);
  VecDuplicate(u,&x);
  VecCreate(PETSC_COMM_WORLD,PETSC_DECIDE,m,&b);
  VecSetFromOptions(b);
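
  /*
     Sketch (editorial assumption): PETSC_DECIDE above leaves the local
     length to PETSc; a specific partitioning could be requested instead,
     e.g. VecCreate(PETSC_COMM_WORLD,nlocal,n,&u), with the nlocal values
     summing to n over the communicator.
  */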

  /*
     Set exact solution with random components.
  */
  PetscRandomCreate(PETSC_COMM_WORLD,RANDOM_DEFAULT,&rctx);
  VecSetRandom(rctx,u);
  VecAssemblyBegin(u);
  VecAssemblyEnd(u);

  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.
  */
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m,n,&A);
  MatSetFromOptions(A);

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);
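
  /*
     Optional diagnostic (editorial sketch, using PETSc's synchronized
     printing routines): show each process's locally owned row range.
  */
  PetscSynchronizedPrintf(PETSC_COMM_WORLD,"owned rows: [%d,%d)\n",Istart,Iend);
  PetscSynchronizedFlush(PETSC_COMM_WORLD);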

  /*
     Set matrix elements in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
   */
  PetscMalloc(n*sizeof(int),&cols);
  for (i=0; i<n; i++) {
    cols[i] = i;
  }
  for (I=Istart; I<Iend; I++) {
    /* Draw a fresh random vector and use its entries as row I of A.
       The array is accessed only between Get/Restore; note that ua
       holds just the locally owned part of u, so reading n entries
       from it assumes the local length of u is at least n (true on
       a single process, where it equals n). */
    VecSetRandom(rctx,u);
    VecGetArray(u,&ua);
    MatSetValues(A,1,&I,n,cols,ua,INSERT_VALUES);
    VecRestoreArray(u,&ua);
  }

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
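  /* (editorial note: independent local computation that does not
      touch A could be placed here, overlapping with the assembly
      communication described above) */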
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
      Compute the right-hand-side vector b = A*u, so that u is by
      construction the exact solution of the system.
  */
  MatMult(A,u,b);

  /*
     View the exact solution vector if desired.
  */
  PetscOptionsHasName(PETSC_NULL,"-view_exact_sol",&flg);
  if (flg) {VecView(u,PETSC_VIEWER_STDOUT_WORLD);}

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  SLESCreate(PETSC_COMM_WORLD,&sles);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  SLESSetOperators(sles,A,A,DIFFERENT_NONZERO_PATTERN);
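
  /* Editorial note: the final argument describes how the nonzero
     structure of the preconditioner matrix relates to that used in
     previous solves; with a single solve, DIFFERENT_NONZERO_PATTERN
     is the conservative, safe choice. */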

  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
    These options will override those specified above as long as
    SLESSetFromOptions() is called _after_ any other customization
    routines.
  */
  SLESSetFromOptions(sles);
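
  /* Illustrative run (editorial; option values are examples only):
         mpirun -np 2 ex17 -ksp_type gmres -pc_type jacobi -ksp_rtol 1.e-6 -ksp_monitor
     would select GMRES with Jacobi preconditioning at this point. */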

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  SLESSolve(sles,b,x,&its);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Check the error: VecAXPY() here computes x = x - u, so the norm
     below is the 2-norm of the difference between the computed and
     exact solutions.
  */
  VecAXPY(&neg_one,u,x);
  VecNorm(x,NORM_2,&norm);

  /*
     Print convergence information.  PetscPrintf() produces a single
     print statement from all processes that share a communicator.
  */
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %A iterations %d\n",norm,its);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  SLESDestroy(sles);
  VecDestroy(u);  VecDestroy(x);
  VecDestroy(b);  MatDestroy(A);
  PetscRandomDestroy(rctx);
  PetscFree(cols);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}