Actual source code: ex23.c

/*$Id: ex23.c,v 1.9 2001/03/23 23:23:55 balay Exp $*/

/* Program usage:  mpirun ex23 [-help] [all PETSc options] */

static char help[] = "Solves a tridiagonal linear system.\n\n";

/*T
   Concepts: SLES^basic parallel example;
   Processors: n
T*/

/*
  Include "petscsles.h" so that we can use SLES solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners

  Note:  The corresponding uniprocessor example is ex1.c
*/
#include "petscsles.h"

int main(int argc,char **args)
{
  Vec     x, b, u;      /* approx solution, RHS, exact solution */
  Mat     A;            /* linear system matrix */
  SLES    sles;         /* linear solver context */
  PC      pc;           /* preconditioner context */
  KSP     ksp;          /* Krylov subspace method context */
  double  norm;         /* norm of solution error */
  int     ierr,i,n = 10,col[3],its,rstart,rend,nlocal;
  Scalar  neg_one = -1.0,one = 1.0,value[3];

  PetscInitialize(&argc,&args,(char *)0,help);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create vectors.  Note that we form 1 vector from scratch and
     then duplicate as needed. For this simple case let PETSc decide how
     many elements of the vector are stored on each processor. The second
     argument to VecCreate() below causes PETSc to decide.
  */
  VecCreate(PETSC_COMM_WORLD,PETSC_DECIDE,n,&x);
  VecSetFromOptions(x);
  VecDuplicate(x,&b);
  VecDuplicate(x,&u);
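
  /*
     Added illustrative sketch (not part of the original example): instead of
     PETSC_DECIDE we could specify the local size explicitly, as long as the
     local sizes sum to n across all processes.  Assuming "size", "rank", and
     "mylocal" are hypothetical variables obtained/derived via standard MPI
     calls, an even split might look like:

         int size,rank,mylocal;
         MPI_Comm_size(PETSC_COMM_WORLD,&size);
         MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
         mylocal = n/size + ((rank < n % size) ? 1 : 0);  // distribute remainder
         VecCreate(PETSC_COMM_WORLD,mylocal,n,&x);
  */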

  /* Identify the starting and ending mesh points on each
     processor for the interior part of the mesh. We let PETSc decide
     above. */

  VecGetOwnershipRange(x,&rstart,&rend);
  VecGetLocalSize(x,&nlocal);

  /*
     Create matrix.  When using MatCreate(), the matrix format can
     be specified at runtime.

     Performance tuning note:  For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance.  Since preallocation is not possible via the generic
     matrix creation routine MatCreate(), we recommend for practical
     problems instead to use the creation routine for a particular matrix
     format, e.g.,
         MatCreateMPIAIJ()  - parallel AIJ (compressed sparse row)
         MatCreateMPIBAIJ() - parallel block AIJ
     See the matrix chapter of the users manual for details.

     We pass in nlocal as the "local" size of the matrix to force it
     to have the same parallel layout as the vector created above.
  */
  MatCreate(PETSC_COMM_WORLD,nlocal,nlocal,n,n,&A);
  MatSetFromOptions(A);
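
  /*
     Added illustrative sketch (not part of the original example): for this
     tridiagonal matrix, a preallocated creation along the lines of the
     performance note above could look roughly like

         MatCreateMPIAIJ(PETSC_COMM_WORLD,nlocal,nlocal,n,n,
                         3,PETSC_NULL,1,PETSC_NULL,&A);

     i.e., at most 3 nonzeros per row in the local diagonal block and at
     most 1 in the off-diagonal block.  Check the MatCreateMPIAIJ() manual
     page for the exact argument list in your PETSc version.
  */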

  /*
     Assemble matrix.

     The linear system is distributed across the processors by
     chunks of contiguous rows, which correspond to contiguous
     sections of the mesh on which the problem is discretized.
     For matrix assembly, each processor contributes entries for
     the part that it owns locally.
  */
  if (!rstart) {
    rstart = 1;
    i = 0; col[0] = 0; col[1] = 1; value[0] = 2.0; value[1] = -1.0;
    MatSetValues(A,1,&i,2,col,value,INSERT_VALUES);
  }
  if (rend == n) {
    rend = n-1;
    i = n-1; col[0] = n-2; col[1] = n-1; value[0] = -1.0; value[1] = 2.0;
    MatSetValues(A,1,&i,2,col,value,INSERT_VALUES);
  }

  /* Set entries corresponding to the mesh interior */
  value[0] = -1.0; value[1] = 2.0; value[2] = -1.0;
  for (i=rstart; i<rend; i++) {
    col[0] = i-1; col[1] = i; col[2] = i+1;
    MatSetValues(A,1,&i,3,col,value,INSERT_VALUES);
  }

  /* Assemble the matrix */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
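
  /*
     Added remark (not in the original): if INSERT_VALUES and ADD_VALUES
     phases had to be mixed, an intermediate flush assembly would be
     required between them, e.g.

         MatAssemblyBegin(A,MAT_FLUSH_ASSEMBLY);
         MatAssemblyEnd(A,MAT_FLUSH_ASSEMBLY);
  */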

  /*
     Set exact solution; then compute right-hand-side vector.
  */
  VecSet(&one,u);
  MatMult(A,u,b);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create linear solver context
  */
  SLESCreate(PETSC_COMM_WORLD,&sles);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  SLESSetOperators(sles,A,A,DIFFERENT_NONZERO_PATTERN);

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the SLES context,
       we can then directly call any KSP and PC routines to set
       various options.
     - The following four statements are optional; all of these
       parameters could alternatively be specified at runtime via
       SLESSetFromOptions().
  */
  SLESGetKSP(sles,&ksp);
  SLESGetPC(sles,&pc);
  PCSetType(pc,PCJACOBI);
  KSPSetTolerances(ksp,1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
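
  /*
     Added illustrative sketch (not part of the original example): the
     Krylov method can also be chosen in code.  Since this matrix is
     symmetric positive definite, conjugate gradients would be a natural
     choice here:

         KSPSetType(ksp,KSPCG);
  */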

  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
    These options will override those specified above as long as
    SLESSetFromOptions() is called _after_ any other customization
    routines.
  */
  SLESSetFromOptions(sles);
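
  /*
     Added usage example (not in the original): with the options mentioned
     above one might run, e.g.,

         mpirun -np 2 ex23 -n 100 -ksp_type cg -ksp_monitor -sles_view

     to solve a 100-point problem on 2 processes with conjugate gradients,
     monitoring the residual at each iteration; the exact mpirun syntax
     depends on the local MPI installation.
  */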
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Solve linear system
  */
  SLESSolve(sles,b,x,&its);

  /*
     View solver info; we could instead use the option -sles_view to
     print this info to the screen at the conclusion of SLESSolve().
  */
  SLESView(sles,PETSC_VIEWER_STDOUT_WORLD);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Check the error
  */
  VecAXPY(&neg_one,u,x);        /* x <- x - u, the error in the solution */
  ierr = VecNorm(x,NORM_2,&norm);CHKERRQ(ierr);
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %A, Iterations %d\n",norm,its);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  VecDestroy(x); VecDestroy(u);
  VecDestroy(b); MatDestroy(A);
  SLESDestroy(sles);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}