Actual source code: dagtona.c

  1: /*$Id: dagtona.c,v 1.10 2001/03/23 23:25:00 balay Exp $*/
  2: 
  3: /*
  4:      Tools to help solve the coarse grid problem redundantly.
  5:   Provides two scatter contexts that (1) map from the usual global vector
  6:   to all processors the entire vector in NATURAL numbering and (2)
  7:   from the entire vector on each processor in natural numbering extracts
  8:   out this processor's piece in GLOBAL numbering
  9: */

 11:  #include "src/dm/da/daimpl.h"

 13: #undef __FUNCT__  
 14: #define __FUNCT__ "DAGlobalToNaturalAllCreate"
 15: /*
 16:    DAGlobalToNaturalAllCreate - Creates a scatter context that maps from the 
 17:      global vector the entire vector to each processor in natural numbering

 19:    Collective on DA

 21:    Input Parameter:
 22: .  da - the distributed array context

 24:    Output Parameter:
 25: .  scatter - the scatter context

 27:    Level: advanced

 29: .keywords: distributed array, global to local, begin, coarse problem

 31: .seealso: DAGlobalToNaturalEnd(), DALocalToGlobal(), DACreate2d(), 
 32:           DAGlobalToLocalBegin(), DAGlobalToLocalEnd(), DACreateNaturalVector()
 33: */
 34: int DAGlobalToNaturalAllCreate(DA da,VecScatter *scatter)
 35: {
 36:   int ierr,N;
 37:   IS  from,to;
 38:   Vec tmplocal,global;
 39:   AO  ao;

 43:   DAGetAO(da,&ao);

 45:   /* create the scatter context */
 46:   ISCreateStride(da->comm,da->Nlocal,0,1,&to);
 47:   AOPetscToApplicationIS(ao,to);
 48:   ISCreateStride(da->comm,da->Nlocal,0,1,&from);
 49:   MPI_Allreduce(&da->Nlocal,&N,1,MPI_INT,MPI_SUM,da->comm);
 50:   VecCreateSeqWithArray(PETSC_COMM_SELF,N,0,&tmplocal);
 51:   VecCreateMPIWithArray(da->comm,da->Nlocal,PETSC_DETERMINE,0,&global);
 52:   VecScatterCreate(global,from,tmplocal,to,scatter);
 53:   VecDestroy(tmplocal);
 54:   VecDestroy(global);
 55:   ISDestroy(from);
 56:   ISDestroy(to);
 57:   return(0);
 58: }

 60: #undef __FUNCT__  
 61: #define __FUNCT__ "DANaturalAllToGlobalCreate"
 62: /*
 63:    DANaturalAllToGlobalCreate - Creates a scatter context that maps from a copy
 64:      of the entire vector on each processor to its local part in the global vector.

 66:    Collective on DA

 68:    Input Parameter:
 69: .  da - the distributed array context

 71:    Output Parameter:
 72: .  scatter - the scatter context

 74:    Level: advanced

 76: .keywords: distributed array, global to local, begin, coarse problem

 78: .seealso: DAGlobalToNaturalEnd(), DALocalToGlobal(), DACreate2d(), 
 79:           DAGlobalToLocalBegin(), DAGlobalToLocalEnd(), DACreateNaturalVector()
 80: */
 81: int DANaturalAllToGlobalCreate(DA da,VecScatter *scatter)
 82: {
 83:   int ierr,M,m = da->Nlocal,start;
 84:   IS  from,to;
 85:   Vec tmplocal,global;
 86:   AO  ao;

 90:   DAGetAO(da,&ao);

 92:   /* create the scatter context */
 93:   MPI_Allreduce(&m,&M,1,MPI_INT,MPI_SUM,da->comm);
 94:   VecCreateMPIWithArray(da->comm,m,PETSC_DETERMINE,0,&global);
 95:   VecGetOwnershipRange(global,&start,PETSC_NULL);
 96:   ISCreateStride(da->comm,m,start,1,&from);
 97:   AOPetscToApplicationIS(ao,from);
 98:   ISCreateStride(da->comm,m,start,1,&to);
 99:   VecCreateSeqWithArray(PETSC_COMM_SELF,M,0,&tmplocal);
100:   VecScatterCreate(tmplocal,from,global,to,scatter);
101:   VecDestroy(tmplocal);
102:   VecDestroy(global);
103:   ISDestroy(from);
104:   ISDestroy(to);
105:   return(0);
106: }