Actual source code: vpscat.h

petsc-3.5.4 2015-05-23
  2: /*
  3:      Defines the methods VecScatterBegin/End_1,2,......
  4:      This is included by vpscat.c with different values for BS

  6:      This is a terrible way of doing "templates" in C.
  7: */
  8: #define PETSCMAP1_a(a,b)  a ## _ ## b
  9: #define PETSCMAP1_b(a,b)  PETSCMAP1_a(a,b)
 10: #define PETSCMAP1(a)      PETSCMAP1_b(a,BS)
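      With these three macros, PETSCMAP1(VecScatterBegin) expands to VecScatterBegin_<BS>,
      so each inclusion of this header with a different BS produces a separate specialization.
      A rough sketch of the instantiation pattern in the including .c file (the exact include
      path and the set of block sizes are assumptions, not copied from vpscat.c):

          /* sketch only: each include instantiates one block-size specialization */
          #define BS 1
          #include "vpscat.h"   /* defines VecScatterBegin_1 and VecScatterEnd_1 */
          #define BS 2
          #include "vpscat.h"   /* defines VecScatterBegin_2 and VecScatterEnd_2 */
          /* ... and so on; the trailing #undef BS in this header makes re-defining BS legal */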

 14: PetscErrorCode PETSCMAP1(VecScatterBegin)(VecScatter ctx,Vec xin,Vec yin,InsertMode addv,ScatterMode mode)
 15: {
 16:   VecScatter_MPI_General *to,*from;
 17:   PetscScalar            *xv,*yv,*svalues;
 18:   MPI_Request            *rwaits,*swaits;
 19:   PetscErrorCode         ierr;
 20:   PetscInt               i,*indices,*sstarts,nrecvs,nsends,bs;

 23:   if (mode & SCATTER_REVERSE) {
 24:     to     = (VecScatter_MPI_General*)ctx->fromdata;
 25:     from   = (VecScatter_MPI_General*)ctx->todata;
 26:     rwaits = from->rev_requests;
 27:     swaits = to->rev_requests;
 28:   } else {
 29:     to     = (VecScatter_MPI_General*)ctx->todata;
 30:     from   = (VecScatter_MPI_General*)ctx->fromdata;
 31:     rwaits = from->requests;
 32:     swaits = to->requests;
 33:   }
 34:   bs      = to->bs;
 35:   svalues = to->values;
 36:   nrecvs  = from->n;
 37:   nsends  = to->n;
 38:   indices = to->indices;
 39:   sstarts = to->starts;
 40: #if defined(PETSC_HAVE_CUSP)

 42:   if (xin->valid_GPU_array == PETSC_CUSP_GPU) {
 43:     if (xin->spptr && ctx->spptr) {
 44:       VecCUSPCopyFromGPUSome_Public(xin,(PetscCUSPIndices)ctx->spptr);
 45:     }
 46:   }
 47:   xv = *(PetscScalar**)xin->data;

 49: #if 0
 50:   if (!xin->map->n || ((xin->map->n > 10000) && (sstarts[nsends]*bs < 0.05*xin->map->n) && (xin->valid_GPU_array == PETSC_CUSP_GPU) && !(to->local.n))) {
 51:     if (!ctx->spptr) {
 52:       PetscInt k,*tindices,n = sstarts[nsends],*sindices;
 53:       PetscMalloc1(n,&tindices);
 54:       PetscMemcpy(tindices,to->indices,n*sizeof(PetscInt));
 55:       PetscSortRemoveDupsInt(&n,tindices);
 56:       PetscMalloc1(bs*n,&sindices);
 57:       for (i=0; i<n; i++) {
 58:         for (k=0; k<bs; k++) sindices[i*bs+k] = tindices[i]+k;
 59:       }
 60:       PetscFree(tindices);
 61:       VecScatterCUSPIndicesCreate_PtoP(n*bs,sindices,n*bs,sindices,(PetscCUSPIndices*)&ctx->spptr);
 62:       PetscFree(sindices);
 63:     }
 64:     VecCUSPCopyFromGPUSome_Public(xin,(PetscCUSPIndices)ctx->spptr);
 65:     xv   = *(PetscScalar**)xin->data;
 66:   } else {
 67:     VecGetArrayRead(xin,(const PetscScalar**)&xv);
 68:   }
 69: #endif

 71: #else
 72:   VecGetArrayRead(xin,(const PetscScalar**)&xv);
 73: #endif

 75:   if (xin != yin) {VecGetArray(yin,&yv);}
 76:   else yv = xv;

 78:   if (!(mode & SCATTER_LOCAL)) {
 79:     if (!from->use_readyreceiver && !to->sendfirst && !to->use_alltoallv && !to->use_window) {
 80:       /* post receives since they were not previously posted    */
 81:       if (nrecvs) {MPI_Startall_irecv(from->starts[nrecvs]*bs,nrecvs,rwaits);}
 82:     }

 84: #if defined(PETSC_HAVE_MPI_ALLTOALLW)  && !defined(PETSC_USE_64BIT_INDICES)
 85:     if (to->use_alltoallw && addv == INSERT_VALUES) {
 86:       MPI_Alltoallw(xv,to->wcounts,to->wdispls,to->types,yv,from->wcounts,from->wdispls,from->types,PetscObjectComm((PetscObject)ctx));
 87:     } else
 88: #endif
 89:     if (ctx->packtogether || to->use_alltoallv || to->use_window) {
 90:       /* this version packs all the messages together and sends, when -vecscatter_packtogether used */
 91:       PETSCMAP1(Pack)(sstarts[nsends],indices,xv,svalues,bs);
 92:       if (to->use_alltoallv) {
 93:         MPI_Alltoallv(to->values,to->counts,to->displs,MPIU_SCALAR,from->values,from->counts,from->displs,MPIU_SCALAR,PetscObjectComm((PetscObject)ctx));
 94: #if defined(PETSC_HAVE_MPI_WIN_CREATE)
 95:       } else if (to->use_window) {
 96:         PetscInt cnt;

 98:         MPI_Win_fence(0,from->window);
 99:         for (i=0; i<nsends; i++) {
100:           cnt  = bs*(to->starts[i+1]-to->starts[i]);
101:           MPI_Put(to->values+bs*to->starts[i],cnt,MPIU_SCALAR,to->procs[i],bs*to->winstarts[i],cnt,MPIU_SCALAR,from->window);
102:         }
103: #endif
104:       } else if (nsends) {
105:         MPI_Startall_isend(to->starts[to->n],nsends,swaits);
106:       }
107:     } else {
108:       /* this version packs and sends one at a time */
109:       for (i=0; i<nsends; i++) {
110:         PETSCMAP1(Pack)(sstarts[i+1]-sstarts[i],indices + sstarts[i],xv,svalues + bs*sstarts[i],bs);
111:         MPI_Start_isend(sstarts[i+1]-sstarts[i],swaits+i);
112:       }
113:     }

115:     if (!from->use_readyreceiver && to->sendfirst && !to->use_alltoallv && !to->use_window) {
116:       /* post receives since they were not previously posted   */
117:       if (nrecvs) {MPI_Startall_irecv(from->starts[nrecvs]*bs,nrecvs,rwaits);}
118:     }
119:   }

121:   /* take care of local scatters */
122:   if (to->local.n) {
123:     if (to->local.is_copy && addv == INSERT_VALUES) {
124:       PetscMemcpy(yv + from->local.copy_start,xv + to->local.copy_start,to->local.copy_length);
125:     } else {
126:       PETSCMAP1(Scatter)(to->local.n,to->local.vslots,xv,from->local.vslots,yv,addv,bs);
127:     }
128:   }
129: #if defined(PETSC_HAVE_CUSP)
130:   /* the read-only array is restored the same way whether or not the data was on the GPU */
131:   VecRestoreArrayRead(xin,(const PetscScalar**)&xv);
135: #else
136:   VecRestoreArrayRead(xin,(const PetscScalar**)&xv);
137: #endif
138:   if (xin != yin) {VecRestoreArray(yin,&yv);}
139:   return(0);
140: }

142: /* --------------------------------------------------------------------------------------*/
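      For orientation, the block-size specializations generated here sit behind the generic
      VecScatterBegin()/VecScatterEnd() interface. A minimal caller looks roughly as follows;
      this is a sketch (the helper name, index set choice, and omitted error checking are
      illustrative, not taken from PETSc):

          #include <petscvec.h>

          /* sketch: gather the first n global entries of a parallel vector into a
             sequential vector owned by each process (CHKERRQ error checking omitted) */
          static PetscErrorCode GatherHead(Vec x,PetscInt n,Vec *yout)
          {
            Vec        y;
            IS         ix;
            VecScatter scat;

            VecCreateSeq(PETSC_COMM_SELF,n,&y);
            ISCreateStride(PETSC_COMM_SELF,n,0,1,&ix);
            VecScatterCreate(x,ix,y,NULL,&scat);        /* NULL iy: all of y, in order */
            VecScatterBegin(scat,x,y,INSERT_VALUES,SCATTER_FORWARD);  /* posts sends/receives */
            VecScatterEnd(scat,x,y,INSERT_VALUES,SCATTER_FORWARD);    /* waits and unpacks    */
            VecScatterDestroy(&scat);
            ISDestroy(&ix);
            *yout = y;
            return 0;
          }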

146: PetscErrorCode PETSCMAP1(VecScatterEnd)(VecScatter ctx,Vec xin,Vec yin,InsertMode addv,ScatterMode mode)
147: {
148:   VecScatter_MPI_General *to,*from;
149:   PetscScalar            *rvalues,*yv;
150:   PetscErrorCode         ierr;
151:   PetscInt               nrecvs,nsends,*indices,count,*rstarts,bs;
152:   PetscMPIInt            imdex;
153:   MPI_Request            *rwaits,*swaits;
154:   MPI_Status             xrstatus,*rstatus,*sstatus;

157:   if (mode & SCATTER_LOCAL) return(0);
158:   VecGetArray(yin,&yv);

160:   to      = (VecScatter_MPI_General*)ctx->todata;
161:   from    = (VecScatter_MPI_General*)ctx->fromdata;
162:   rwaits  = from->requests;
163:   swaits  = to->requests;
164:   sstatus = to->sstatus;    /* sstatus and rstatus are always stored in to */
165:   rstatus = to->rstatus;
166:   if (mode & SCATTER_REVERSE) {
167:     to     = (VecScatter_MPI_General*)ctx->fromdata;
168:     from   = (VecScatter_MPI_General*)ctx->todata;
169:     rwaits = from->rev_requests;
170:     swaits = to->rev_requests;
171:   }
172:   bs      = from->bs;
173:   rvalues = from->values;
174:   nrecvs  = from->n;
175:   nsends  = to->n;
176:   indices = from->indices;
177:   rstarts = from->starts;

179:   if (ctx->packtogether || (to->use_alltoallw && (addv != INSERT_VALUES)) || (to->use_alltoallv && !to->use_alltoallw) || to->use_window) {
180: #if defined(PETSC_HAVE_MPI_WIN_CREATE)
181:     if (to->use_window) {MPI_Win_fence(0,from->window);}
182:     else
183: #endif
184:     if (nrecvs && !to->use_alltoallv) {MPI_Waitall(nrecvs,rwaits,rstatus);}
185:     PETSCMAP1(UnPack)(from->starts[from->n],from->values,indices,yv,addv,bs);
186:   } else if (!to->use_alltoallw) {
187:     /* unpack one at a time */
188:     count = nrecvs;
189:     while (count) {
190:       if (ctx->reproduce) {
191:         imdex = count - 1;
192:         MPI_Wait(rwaits+imdex,&xrstatus);
193:       } else {
194:         MPI_Waitany(nrecvs,rwaits,&imdex,&xrstatus);
195:       }
196:       /* unpack receives into our local space */
197:       PETSCMAP1(UnPack)(rstarts[imdex+1] - rstarts[imdex],rvalues + bs*rstarts[imdex],indices + rstarts[imdex],yv,addv,bs);
198:       count--;
199:     }
200:   }
201:   if (from->use_readyreceiver) {
202:     if (nrecvs) {MPI_Startall_irecv(from->starts[nrecvs]*bs,nrecvs,rwaits);}
203:     MPI_Barrier(PetscObjectComm((PetscObject)ctx));
204:   }

206:   /* wait on sends */
207:   if (nsends  && !to->use_alltoallv  && !to->use_window) {MPI_Waitall(nsends,swaits,sstatus);}
208:   VecRestoreArray(yin,&yv);
209:   return(0);
210: }

212: #undef PETSCMAP1_a
213: #undef PETSCMAP1_b
214: #undef PETSCMAP1
215: #undef BS
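      The PETSCMAP1(Pack), PETSCMAP1(UnPack) and PETSCMAP1(Scatter) helpers called above follow
      the same BS-suffixed naming scheme. Purely as an illustration of the idea (this is not the
      PETSc implementation), a pack routine of this shape copies each indexed block of bs scalars
      into the contiguous send buffer, consistent with the sindices[i*bs+k] = tindices[i]+k
      construction in the disabled CUSP branch of VecScatterBegin:

          /* illustrative sketch only: gather n blocks of bs consecutive scalars of x,
             block i starting at x[indices[i]], into the contiguous send buffer y */
          static void Pack_Sketch(PetscInt n,const PetscInt *indices,const PetscScalar *x,PetscScalar *y,PetscInt bs)
          {
            PetscInt i,j;
            for (i=0; i<n; i++) {
              for (j=0; j<bs; j++) *y++ = x[indices[i]+j];
            }
          }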