Actual source code: ex8.c
/*$Id: ex8.c,v 1.22 2001/04/10 19:35:06 bsmith Exp $*/

static char help[] = "Demonstrates using a local ordering to set values into a parallel vector.\n\n";

/*T
   Concepts: vectors^assembling vectors with local ordering;
   Processors: n
T*/

/*
  Include "petscvec.h" so that we can use vectors.  Note that this file
  automatically includes:
     petsc.h    - base PETSc routines      petscis.h     - index sets
     petscsys.h - system routines          petscviewer.h - viewers
*/
#include "petscvec.h"

int main(int argc,char **argv)
{
  int    i,N,ierr,rank,ng,*gindices,rstart,rend,M;
  Scalar one = 1.0;
  Vec    x;

  PetscInitialize(&argc,&argv,(char *)0,help);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  /*
     Create a parallel vector.
      - In this case, we specify the size of each processor's local
        portion and let PETSc compute the global size.  Alternatively,
        PETSc could determine the vector's distribution if we specified
        just the global size.
  */
  VecCreateMPI(PETSC_COMM_WORLD,rank+1,PETSC_DECIDE,&x);
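  /*
     Illustration only (not part of the original example): the alternative
     mentioned above would fix the global size and let PETSc choose each
     local portion, e.g. for a hypothetical global size of 20:

        VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,20,&x);
  */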
  VecGetSize(x,&N);
  VecSet(&one,x);

  /*
     Set the local to global ordering for the vector.  Each processor
     generates a list of the global indices for each local index.  Note
     that the local indices are just whatever is convenient for a
     particular application.  In this case we treat the vector as lying
     on a one-dimensional grid and have one ghost point on each end of
     the blocks owned by each processor; a worked layout is sketched in
     the comment below.
  */
  VecGetSize(x,&M);
  VecGetOwnershipRange(x,&rstart,&rend);
  ng = rend - rstart + 2;
  PetscMalloc(ng*sizeof(int),&gindices);
  gindices[0] = rstart - 1;
  for (i=0; i<ng-1; i++) {
    gindices[i+1] = gindices[i] + 1;
  }
  /* map the first and last points as periodic */
  if (gindices[0] == -1)   gindices[0]    = M - 1;
  if (gindices[ng-1] == M) gindices[ng-1] = 0;
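  /*
     Illustration only (not part of the original example): on 2 processes
     the local sizes are rank+1, so M = 3.  Process 0 owns entries [0,1)
     and builds gindices = {-1,0,1}, wrapped periodically to {2,0,1};
     process 1 owns entries [1,3) and builds {0,1,2,3}, wrapped to
     {0,1,2,0}.
  */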
  {
    ISLocalToGlobalMapping ltog;
    ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,ng,gindices,&ltog);
    VecSetLocalToGlobalMapping(x,ltog);
    ISLocalToGlobalMappingDestroy(ltog);
  }
  PetscFree(gindices);

  /*
     Set the vector elements.
      - In this case, set the values using the local ordering.
      - Each processor can contribute any vector entries,
        regardless of which processor "owns" them; any nonlocal
        contributions will be transferred to the appropriate processor
        during the assembly process.
      - In this example, the flag ADD_VALUES indicates that all
        contributions will be added together.
  */
  for (i=0; i<ng; i++) {
    VecSetValuesLocal(x,1,&i,&one,ADD_VALUES);
  }
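  /*
     Illustration only (not part of the original example): the same kind
     of update with a global index would go through VecSetValues(); for
     a hypothetical global index gi,

        VecSetValues(x,1,&gi,&one,ADD_VALUES);

     adds one to global entry gi regardless of which processor owns it.
  */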
  /*
     Assemble vector, using the 2-step process:
       VecAssemblyBegin(), VecAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  VecAssemblyBegin(x);
  VecAssemblyEnd(x);

  /*
     View the vector; then destroy it.
  */
  VecView(x,PETSC_VIEWER_STDOUT_WORLD);
  VecDestroy(x);

  PetscFinalize();
  return 0;
}
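/*
   Illustration only (not part of the original example): a typical way to
   run this example, assuming the executable has been built as ex8 and an
   MPI launcher is available:

      mpirun -np 2 ex8
*/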