Bug Summary

File: src/vec/vscat/impls/sf/vscatsf.c
Warning: line 436, column 10
1st function call argument is an uninitialized value

Annotated Source Code

[?] Use j/k keys for keyboard navigation

1#include <petsc/private/vecscatterimpl.h> /*I "petscvec.h" I*/
2#include <petsc/private/sfimpl.h> /*I "petscsf.h" I*/
3
4typedef struct {
5 PetscSF sf; /* the whole scatter, including local and remote */
6 PetscSF lsf; /* the local part of the scatter, used for SCATTER_LOCAL */
7 PetscInt bs; /* block size */
8 MPI_Datatype unit; /* one unit = bs PetscScalars */
9} VecScatter_SF;
10
11static PetscErrorCode VecScatterBegin_SF(VecScatter vscat,Vec x,Vec y,InsertMode addv,ScatterMode mode)
12{
13 VecScatter_SF *data=(VecScatter_SF*)vscat->data;
14 PetscSF sf;
15 MPI_Op mop=MPI_OP_NULL((MPI_Op)0x18000000);
16 PetscErrorCode ierr;
17
18 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
; petscstack->line[petscstack->currentsize] = 18; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
19 if (x != y) {ierr = VecLockReadPush(x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),19,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
20
21 {
22#if defined(PETSC_HAVE_CUDA)
23 PetscBool is_cudatype = PETSC_FALSE;
24 ierr = PetscObjectTypeCompareAny((PetscObject)x,&is_cudatype,VECSEQCUDA"seqcuda",VECMPICUDA"mpicuda",VECCUDA"cuda","");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),24,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
25 if (is_cudatype) {
26 VecCUDAAllocateCheckHost(x);
27 if (x->valid_GPU_array == PETSC_OFFLOAD_GPU) {
28 if (x->spptr && vscat->spptr) {ierr = VecCUDACopyFromGPUSome_Public(x,(PetscCUDAIndices)vscat->spptr,mode);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),28,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
29 else {ierr = VecCUDACopyFromGPU(x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),29,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
30 }
31 vscat->xdata = *((PetscScalar**)x->data);
32 } else
33#endif
34 {
35 ierr = VecGetArrayRead(x,&vscat->xdata);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),35,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
36 }
37 }
38
39 if (x != y) {ierr = VecGetArray(y,&vscat->ydata);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),39,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
40 else vscat->ydata = (PetscScalar *)vscat->xdata;
41 ierr = VecLockWriteSet_Private(y,PETSC_TRUE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),41,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
42
43 /* SCATTER_LOCAL indicates ignoring inter-process communication */
44 sf = (mode & SCATTER_LOCAL) ? data->lsf : data->sf;
45
46 if (addv == INSERT_VALUES) mop = MPI_REPLACE(MPI_Op)(0x5800000d);
47 else if (addv == ADD_VALUES) mop = MPI_SUM(MPI_Op)(0x58000003);
48 else if (addv == MAX_VALUES) mop = MPI_MAX(MPI_Op)(0x58000001);
49 else SETERRQ1(PetscObjectComm((PetscObject)sf),PETSC_ERR_SUP,"Unsupported InsertMode %D in VecScatterBegin/End",addv)return PetscError(PetscObjectComm((PetscObject)sf),49,__func__
,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,56,PETSC_ERROR_INITIAL,"Unsupported InsertMode %D in VecScatterBegin/End"
,addv)
;
50
51 if (mode & SCATTER_REVERSE) { /* reverse scatter sends root to leaf. Note that x and y are swapped in input */
52 ierr = PetscSFBcastAndOpBegin(sf,data->unit,vscat->xdata,vscat->ydata,mop);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),52,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
53 } else { /* forward scatter sends leaf to root, i.e., x to y */
54 ierr = PetscSFReduceBegin(sf,data->unit,vscat->xdata,vscat->ydata,mop);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),54,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
55 }
56 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
57}
58
59static PetscErrorCode VecScatterEnd_SF(VecScatter vscat,Vec x,Vec y,InsertMode addv,ScatterMode mode)
60{
61 VecScatter_SF *data=(VecScatter_SF*)vscat->data;
62 PetscSF sf;
63 MPI_Op mop=MPI_OP_NULL((MPI_Op)0x18000000);
64 PetscErrorCode ierr;
65
66 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
; petscstack->line[petscstack->currentsize] = 66; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
67 /* SCATTER_LOCAL indicates ignoring inter-process communication */
68 sf = (mode & SCATTER_LOCAL) ? data->lsf : data->sf;
69
70 if (addv == INSERT_VALUES) mop = MPI_REPLACE(MPI_Op)(0x5800000d);
71 else if (addv == ADD_VALUES) mop = MPI_SUM(MPI_Op)(0x58000003);
72 else if (addv == MAX_VALUES) mop = MPI_MAX(MPI_Op)(0x58000001);
73 else SETERRQ1(PetscObjectComm((PetscObject)sf),PETSC_ERR_SUP,"Unsupported InsertMode %D in VecScatterBegin/End",addv)return PetscError(PetscObjectComm((PetscObject)sf),73,__func__
,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,56,PETSC_ERROR_INITIAL,"Unsupported InsertMode %D in VecScatterBegin/End"
,addv)
;
74
75 if (mode & SCATTER_REVERSE) {/* reverse scatter sends root to leaf. Note that x and y are swapped in input */
76 ierr = PetscSFBcastAndOpEnd(sf,data->unit,vscat->xdata,vscat->ydata,mop);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),76,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
77 } else { /* forward scatter sends leaf to root, i.e., x to y */
78 ierr = PetscSFReduceEnd(sf,data->unit,vscat->xdata,vscat->ydata,mop);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),78,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
79 }
80
81 if (x != y) {
82 ierr = VecRestoreArrayRead(x,&vscat->xdata);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),82,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
83 ierr = VecLockReadPop(x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),83,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
84 }
85 ierr = VecRestoreArray(y,&vscat->ydata);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),85,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
86 ierr = VecLockWriteSet_Private(y,PETSC_FALSE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),86,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
87 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
88}
89
90static PetscErrorCode VecScatterCopy_SF(VecScatter vscat,VecScatter ctx)
91{
92 VecScatter_SF *data=(VecScatter_SF*)vscat->data,*out;
93 PetscErrorCode ierr;
94
95 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
; petscstack->line[petscstack->currentsize] = 95; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
96 ierr = PetscMemcpy(ctx->ops,vscat->ops,sizeof(vscat->ops));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),96,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
97 ierr = PetscNewLog(ctx,&out)(PetscMallocA(1,PETSC_TRUE,97,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,(size_t)(1)*sizeof(**(((&out)))),(((&out)))) || PetscLogObjectMemory
((PetscObject)ctx,sizeof(**(&out))))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),97,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
98 ierr = PetscSFDuplicate(data->sf,PETSCSF_DUPLICATE_GRAPH,&out->sf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),98,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
99 ierr = PetscSFDuplicate(data->lsf,PETSCSF_DUPLICATE_GRAPH,&out->lsf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),99,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
100 ierr = PetscSFSetUp(out->sf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),100,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
101 ierr = PetscSFSetUp(out->lsf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),101,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
102
103 out->bs = data->bs;
104 if (out->bs > 1) {
105 ierr = MPI_Type_dup(data->unit,&out->unit);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),105,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
; /* Since oldtype is committed, so is newtype, according to MPI */
106 } else {
107 out->unit = MPIU_SCALAR((MPI_Datatype)0x4c00080b);
108 }
109 ctx->data = (void*)out;
110 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
111}
112
113static PetscErrorCode VecScatterDestroy_SF(VecScatter vscat)
114{
115 VecScatter_SF *data = (VecScatter_SF *)vscat->data;
116 PetscErrorCode ierr;
117
118 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
; petscstack->line[petscstack->currentsize] = 118; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
119 ierr = PetscSFDestroy(&data->sf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),119,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
120 ierr = PetscSFDestroy(&data->lsf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),120,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
121 if (data->bs > 1) {ierr = MPI_Type_free(&data->unit);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),121,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
122 ierr = PetscFree(vscat->data)((*PetscTrFree)((void*)(vscat->data),122,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
) || ((vscat->data) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),122,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
123 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
124}
125
126static PetscErrorCode VecScatterView_SF(VecScatter vscat,PetscViewer viewer)
127{
128 VecScatter_SF *data = (VecScatter_SF *)vscat->data;
129 PetscErrorCode ierr;
130
131 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
; petscstack->line[petscstack->currentsize] = 131; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
132 ierr = PetscSFView(data->sf,viewer);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),132,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
133 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
134}
135
136/* VecScatterRemap provides a light way to slightly modify a VecScatter. Suppose the input vscat scatters
137 x[i] to y[j], tomap gives a plan to change vscat to scatter x[tomap[i]] to y[j].
138 */
139static PetscErrorCode VecScatterRemap_SF(VecScatter vscat,const PetscInt *tomap,const PetscInt *frommap)
140{
141 VecScatter_SF *data = (VecScatter_SF *)vscat->data;
142 PetscSF sfs[2],sf;
143 PetscInt i,j;
144 PetscBool ident;
145 PetscErrorCode ierr;
146
147 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
; petscstack->line[petscstack->currentsize] = 147; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
148 sfs[0] = data->sf;
149 sfs[1] = data->lsf;
150
151 if (tomap) {
152 /* check if it is an identity map. If it is, do nothing */
153 ident = PETSC_TRUE;
154 for (i=0; i<data->sf->nleaves; i++) {if (i != tomap[i]) {ident = PETSC_FALSE; break; } }
155 if (ident) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
156
157 for (j=0; j<2; j++) {
158 sf = sfs[j];
159 ierr = PetscSFSetUp(sf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),159,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
; /* to bulid sf->rmine if SetUp is not yet called */
160 if (!sf->mine) { /* the old SF uses contiguous ilocal. After the remapping, it may not be true */
161 ierr = PetscMalloc1(sf->nleaves,&sf->mine)PetscMallocA(1,PETSC_FALSE,161,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,(size_t)(sf->nleaves)*sizeof(**(&sf->mine)),(&
sf->mine))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),161,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
162 ierr = PetscArraycpy(sf->mine,tomap,sf->nleaves)((sizeof(*(sf->mine)) != sizeof(*(tomap))) || PetscMemcpy(
sf->mine,tomap,(sf->nleaves)*sizeof(*(sf->mine))));
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),162,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
163 sf->mine_alloc = sf->mine;
164 } else {
165 for (i=0; i<sf->nleaves; i++) sf->mine[i] = tomap[sf->mine[i]];
166 }
167 for (i=0; i<sf->roffset[sf->nranks]; i++) sf->rmine[i] = tomap[sf->rmine[i]];
168 }
169 }
170
171 if (frommap) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unable to remap the FROM in scatters yet")return PetscError(((MPI_Comm)0x44000001),171,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,56,PETSC_ERROR_INITIAL,"Unable to remap the FROM in scatters yet"
)
;
172 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
173}
174
175static PetscErrorCode VecScatterGetRemoteCount_SF(VecScatter vscat,PetscBool send,PetscInt *num_procs,PetscInt *num_entries)
176{
177 VecScatter_SF *data = (VecScatter_SF *)vscat->data;
178 PetscSF sf = data->sf;
179 PetscInt nranks,remote_start;
180 PetscMPIInt myrank;
181 const PetscInt *offset;
182 const PetscMPIInt *ranks;
183 PetscErrorCode ierr;
184
185 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
; petscstack->line[petscstack->currentsize] = 185; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
186 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&myrank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),186,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
187
188 if (send) { ierr = PetscSFGetRanks(sf,&nranks,&ranks,&offset,NULL((void*)0),NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),188,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
; }
189 else { ierr = PetscSFGetLeafRanks(sf,&nranks,&ranks,&offset,NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),189,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
; }
190
191 if (nranks) {
192 remote_start = (myrank == ranks[0])? 1 : 0;
193 if (num_procs) *num_procs = nranks - remote_start;
194 if (num_entries) *num_entries = offset[nranks] - offset[remote_start];
195 } else {
196 if (num_procs) *num_procs = 0;
197 if (num_entries) *num_entries = 0;
198 }
199 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
200}
201
202static PetscErrorCode VecScatterGetRemote_SF(VecScatter vscat,PetscBool send,PetscInt *n,const PetscInt **starts,const PetscInt **indices,const PetscMPIInt **procs,PetscInt *bs)
203{
204 VecScatter_SF *data = (VecScatter_SF *)vscat->data;
205 PetscSF sf = data->sf;
206 PetscInt nranks,remote_start;
207 PetscMPIInt myrank;
208 const PetscInt *offset,*location;
209 const PetscMPIInt *ranks;
210 PetscErrorCode ierr;
211
212 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
; petscstack->line[petscstack->currentsize] = 212; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
213 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&myrank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),213,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
214
215 if (send) { ierr = PetscSFGetRanks(sf,&nranks,&ranks,&offset,&location,NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),215,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
; }
216 else { ierr = PetscSFGetLeafRanks(sf,&nranks,&ranks,&offset,&location);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),216,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
; }
217
218 if (nranks) {
219 remote_start = (myrank == ranks[0])? 1 : 0;
220 if (n) *n = nranks - remote_start;
221 if (starts) *starts = &offset[remote_start];
222 if (indices) *indices = location; /* not &location[offset[remote_start]]. Starts[0] may point to the middle of indices[] */
223 if (procs) *procs = &ranks[remote_start];
224 } else {
225 if (n) *n = 0;
226 if (starts) *starts = NULL((void*)0);
227 if (indices) *indices = NULL((void*)0);
228 if (procs) *procs = NULL((void*)0);
229 }
230
231 if (bs) *bs = 1;
232 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
233}
234
235static PetscErrorCode VecScatterGetRemoteOrdered_SF(VecScatter vscat,PetscBool send,PetscInt *n,const PetscInt **starts,const PetscInt **indices,const PetscMPIInt **procs,PetscInt *bs)
236{
237 PetscErrorCode ierr;
238
239 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
; petscstack->line[petscstack->currentsize] = 239; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
240 ierr = VecScatterGetRemote_SF(vscat,send,n,starts,indices,procs,bs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),240,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
241 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
242}
243
244static PetscErrorCode VecScatterRestoreRemote_SF(VecScatter vscat,PetscBool send,PetscInt *n,const PetscInt **starts,const PetscInt **indices,const PetscMPIInt **procs,PetscInt *bs)
245{
246 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
; petscstack->line[petscstack->currentsize] = 246; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
247 if (starts) *starts = NULL((void*)0);
248 if (indices) *indices = NULL((void*)0);
249 if (procs) *procs = NULL((void*)0);
250 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
251}
252
253static PetscErrorCode VecScatterRestoreRemoteOrdered_SF(VecScatter vscat,PetscBool send,PetscInt *n,const PetscInt **starts,const PetscInt **indices,const PetscMPIInt **procs,PetscInt *bs)
254{
255 PetscErrorCode ierr;
256 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
; petscstack->line[petscstack->currentsize] = 256; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
257 ierr = VecScatterRestoreRemote_SF(vscat,send,n,starts,indices,procs,bs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),257,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
258 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
259}
260
261typedef enum {IS_INVALID, IS_GENERAL, IS_BLOCK, IS_STRIDE} ISTypeID;
262
263PETSC_STATIC_INLINEstatic inline PetscErrorCode ISGetTypeID_Private(IS is,ISTypeID *id)
264{
265 PetscErrorCode ierr;
266 PetscBool same;
267
268 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
; petscstack->line[petscstack->currentsize] = 268; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
269 *id = IS_INVALID;
270 ierr = PetscObjectTypeCompare((PetscObject)is,ISGENERAL"general",&same);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),270,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
271 if (same) {*id = IS_GENERAL; goto functionend;}
272 ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK"block",&same);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),272,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
273 if (same) {*id = IS_BLOCK; goto functionend;}
274 ierr = PetscObjectTypeCompare((PetscObject)is,ISSTRIDE"stride",&same);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),274,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
275 if (same) {*id = IS_STRIDE; goto functionend;}
276functionend:
277 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
278}
279
280static PetscErrorCode VecScatterSetUp_SF(VecScatter vscat)
281{
282 VecScatter_SF *data;
283 MPI_Comm comm,xcomm,ycomm,bigcomm;
284 Vec x=vscat->from_v,y=vscat->to_v,xx,yy;
285 IS ix=vscat->from_is,iy=vscat->to_is,ixx,iyy;
1
'ixx' declared without an initial value
286 PetscMPIInt size,xcommsize,ycommsize,myrank;
287 PetscInt i,j,n,N,nroots,nleaves,inedges=0,*leafdata,*rootdata,*ilocal,*lilocal,xstart,ystart,lnleaves,ixsize,iysize,xlen,ylen;
288 const PetscInt *xindices,*yindices,*degree;
289 PetscSFNode *iremote,*liremote;
290 PetscLayout xlayout,ylayout;
291 PetscSF tmpsf;
292 ISTypeID ixid,iyid;
293 PetscInt bs,bsx,bsy,min=PETSC_MIN_INT(-2147483647 - 1),max=PETSC_MAX_INT2147483647,ixfirst,ixstep,iyfirst,iystep;
294 PetscBool can_do_block_opt=PETSC_FALSE;
295 PetscErrorCode ierr;
296
297 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
; petscstack->line[petscstack->currentsize] = 297; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
298 ierr = PetscNewLog(vscat,&data)(PetscMallocA(1,PETSC_TRUE,298,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,(size_t)(1)*sizeof(**(((&data)))),(((&data)))) || PetscLogObjectMemory
((PetscObject)vscat,sizeof(**(&data))))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),298,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
299
300 /* Let P and S stand for parallel and sequential vectors respectively, there are four combinations of vecscatters: PtoP, PtoS, StoP and StoS.
301 The assumption of VecScatterCreate(Vec x,IS ix,Vec y,IS iy,VecScatter *newctx) is: if x is parallel, then ix contains global
302 indices of x. If x is sequential, ix contains local indices of x. Similarily for y and iy.
303
304 SF builds around concepts of local leaves and remote roots, which correspond to an StoP scatter. We transform PtoP and PtoS to StoP, and
305 treat StoS as a trivial StoP.
306 */
307 ierr = PetscObjectGetComm((PetscObject)x,&xcomm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),307,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
308 ierr = PetscObjectGetComm((PetscObject)y,&ycomm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),308,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
309 ierr = MPI_Comm_size(xcomm,&xcommsize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),309,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
310 ierr = MPI_Comm_size(ycomm,&ycommsize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),310,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
311
312 /* NULL ix or iy in VecScatterCreate(x,ix,y,iy,newctx) has special meaning. Recover them for these cases */
313 if (!ix) {
2
Assuming 'ix' is non-null
3
Taking false branch
314 if (xcommsize > 1 && ycommsize == 1) { /* PtoS: null ix means the whole x will be scattered to each seq y */
315 ierr = VecGetSize(x,&N);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),315,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
316 ierr = ISCreateStride(PETSC_COMM_SELF((MPI_Comm)0x44000001),N,0,1,&ix);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),316,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
317 } else { /* PtoP, StoP or StoS: null ix means the whole local part of x will be scattered */
318 ierr = VecGetLocalSize(x,&n);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),318,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
319 ierr = VecGetOwnershipRange(x,&xstart,NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),319,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
320 ierr = ISCreateStride(PETSC_COMM_SELF((MPI_Comm)0x44000001),n,xstart,1,&ix);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),320,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
321 }
322 }
323
324 if (!iy) {
4
Assuming 'iy' is non-null
5
Taking false branch
325 if (xcommsize == 1 && ycommsize > 1) { /* StoP: null iy means the whole y will be scattered to from each seq x */
326 ierr = VecGetSize(y,&N);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),326,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
327 ierr = ISCreateStride(PETSC_COMM_SELF((MPI_Comm)0x44000001),N,0,1,&iy);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),327,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
328 } else { /* PtoP, StoP or StoS: null iy means the whole local part of y will be scattered to */
329 ierr = VecGetLocalSize(y,&n);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),329,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
330 ierr = VecGetOwnershipRange(y,&ystart,NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),330,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
331 ierr = ISCreateStride(PETSC_COMM_SELF((MPI_Comm)0x44000001),n,ystart,1,&iy);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),331,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
332 }
333 }
334
335 /* Do error checking immediately after we have non-empty ix, iy */
336 ierr = ISGetLocalSize(ix,&ixsize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),336,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
337 ierr = ISGetLocalSize(iy,&iysize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),337,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
338 ierr = VecGetSize(x,&xlen);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),338,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
339 ierr = VecGetSize(y,&ylen);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),339,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
340 if (ixsize != iysize) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Scatter sizes of ix and iy don't match locally")return PetscError(((MPI_Comm)0x44000001),340,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,60,PETSC_ERROR_INITIAL,"Scatter sizes of ix and iy don't match locally"
)
;
6
Taking false branch
341 ierr = ISGetMinMax(ix,&min,&max);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),341,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
342 if (min < 0 || max >= xlen) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Scatter indices in ix are out of range")return PetscError(((MPI_Comm)0x44000001),342,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,63,PETSC_ERROR_INITIAL,"Scatter indices in ix are out of range"
)
;
7
Assuming 'min' is >= 0
8
Taking false branch
343 ierr = ISGetMinMax(iy,&min,&max);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),343,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
344 if (min < 0 || max >= ylen) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Scatter indices in iy are out of range")return PetscError(((MPI_Comm)0x44000001),344,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,63,PETSC_ERROR_INITIAL,"Scatter indices in iy are out of range"
)
;
9
Assuming 'min' is >= 0
10
Taking false branch
345
346 /* Do block optimization by taking advantage of high level info available in ix, iy.
347 The block optimization is valid when all of the following conditions are met:
348 1) ix, iy are blocked or can be blocked (i.e., strided with step=1);
349 2) ix, iy have the same block size;
350 3) all processors agree on one block size;
351 4) no blocks span more than one process;
352 */
353 data->bs = 1; /* default, no blocking */
354 data->unit = MPIU_SCALAR((MPI_Datatype)0x4c00080b);
355 ierr = ISGetTypeID_Private(ix,&ixid);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),355,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
356 ierr = ISGetTypeID_Private(iy,&iyid);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),356,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
357 bigcomm = (ycommsize == 1) ? xcomm : ycomm;
11
Assuming 'ycommsize' is not equal to 1
12
'?' condition is false
358
359 if (ixid == IS_BLOCK) {ierr = ISGetBlockSize(ix,&bsx);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),359,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
13
Taking false branch
360 else if (ixid == IS_STRIDE) {ierr = ISStrideGetInfo(ix,&ixfirst,&ixstep);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),360,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
14
Taking false branch
361
362 if ( iyid == IS_BLOCK) {ierr = ISGetBlockSize(iy,&bsy);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),362,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
15
Taking false branch
363 else if (iyid == IS_STRIDE) {ierr = ISStrideGetInfo(iy,&iyfirst,&iystep);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),363,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
16
Taking false branch
364
365 /* Processors could go through different path in this if-else test */
366 if (ixid == IS_BLOCK && iyid == IS_BLOCK) {
367 min = PetscMin(bsx,bsy)(((bsx)<(bsy)) ? (bsx) : (bsy));
368 max = PetscMax(bsx,bsy)(((bsx)<(bsy)) ? (bsy) : (bsx));
369 } else if (ixid == IS_BLOCK && iyid == IS_STRIDE && iystep==1 && iyfirst%bsx==0) {
370 min = max = bsx;
371 } else if (ixid == IS_STRIDE && iyid == IS_BLOCK && ixstep==1 && ixfirst%bsy==0) {
372 min = max = bsy;
373 }
374 ierr = MPIU_Allreduce(MPI_IN_PLACE,&min,1,MPIU_INT,MPI_MIN,bigcomm)(PetscAllreduceBarrierCheck(bigcomm,1,374,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((bigcomm)),
0) || MPI_Allreduce(((void *) -1),(&min),(1),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000002)),(bigcomm))))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),374,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
375 ierr = MPIU_Allreduce(MPI_IN_PLACE,&max,1,MPIU_INT,MPI_MAX,bigcomm)(PetscAllreduceBarrierCheck(bigcomm,1,375,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((bigcomm)),
0) || MPI_Allreduce(((void *) -1),(&max),(1),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000001)),(bigcomm))))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),375,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
376
377 /* Since we used allreduce above, all ranks will have the same min and max. min==max
378 implies all ranks have the same bs. Do further test to see if local vectors are dividable
379 by bs on ALL ranks. If they are, we are ensured that no blocks span more than one processor.
380 */
381 if (min == max && min > 1) {
17
Assuming 'min' is > 1
18
Taking true branch
382 PetscInt m[2];
383 ierr = VecGetLocalSize(x,&xlen);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),383,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
384 ierr = VecGetLocalSize(y,&ylen);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),384,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
385 m[0] = xlen%min;
386 m[1] = ylen%min;
387 ierr = MPIU_Allreduce(MPI_IN_PLACE,m,2,MPIU_INT,MPI_LOR,bigcomm)(PetscAllreduceBarrierCheck(bigcomm,2,387,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((bigcomm)),
0) || MPI_Allreduce(((void *) -1),(m),(2),(((MPI_Datatype)0x4c000405
)),((MPI_Op)(0x58000007)),(bigcomm))))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),387,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
388 if (!m[0] && !m[1]) can_do_block_opt = PETSC_TRUE;
19
Assuming the condition is true
20
Assuming the condition is true
21
Taking true branch
389 }
390
391 /* If can_do_block_opt, then shrink x, y, ix and iy by bs to get xx, yy, ixx and iyy, whose indices
392 and layout are actually used in building SF. Suppose blocked ix representing {0,1,2,6,7,8} has
393 indices {0,2} and bs=3, then ixx = {0,2}; suppose strided iy={3,4,5,6,7,8}, then iyy={1,2}.
394
395 yy is a little special. If y is seq, then yy is the concatenation of seq y's on xcomm. In this way,
396 we can treat PtoP and PtoS uniformly as PtoP.
397 */
398 if (can_do_block_opt) {
22
Taking true branch
399 const PetscInt *indices;
400
401 data->bs = bs = min;
402 ierr = MPI_Type_contiguous(bs,MPIU_SCALAR((MPI_Datatype)0x4c00080b),&data->unit);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),402,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
403 ierr = MPI_Type_commit(&data->unit);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),403,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
404
405 /* Shrink x and ix */
406 ierr = VecCreateMPIWithArray(xcomm,1,xlen/bs,PETSC_DECIDE-1,NULL((void*)0),&xx);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),406,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
; /* We only care xx's layout */
407 if (ixid == IS_BLOCK) {
23
Taking false branch
408 ierr = ISBlockGetIndices(ix,&indices);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),408,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
409 ierr = ISBlockGetLocalSize(ix,&ixsize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),409,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
410 ierr = ISCreateGeneral(PETSC_COMM_SELF((MPI_Comm)0x44000001),ixsize,indices,PETSC_COPY_VALUES,&ixx);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),410,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
411 ierr = ISBlockRestoreIndices(ix,&indices);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),411,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
412 } else if (ixid == IS_STRIDE) {
24
Taking false branch
413 ierr = ISGetLocalSize(ix,&ixsize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),413,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
414 ierr = ISCreateStride(PETSC_COMM_SELF((MPI_Comm)0x44000001),ixsize/bs,ixfirst/bs,1,&ixx);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),414,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
415 }
416
417 /* Shrink y and iy */
418 ierr = VecCreateMPIWithArray(bigcomm,1,ylen/bs,PETSC_DECIDE-1,NULL((void*)0),&yy);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),418,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
419 if (iyid == IS_BLOCK) {
25
Taking false branch
420 ierr = ISBlockGetIndices(iy,&indices);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),420,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
421 ierr = ISBlockGetLocalSize(iy,&iysize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),421,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
422 ierr = ISCreateGeneral(PETSC_COMM_SELF((MPI_Comm)0x44000001),iysize,indices,PETSC_COPY_VALUES,&iyy);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),422,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
423 ierr = ISBlockRestoreIndices(iy,&indices);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),423,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
424 } else if (iyid == IS_STRIDE) {
26
Taking false branch
425 ierr = ISGetLocalSize(iy,&iysize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),425,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
426 ierr = ISCreateStride(PETSC_COMM_SELF((MPI_Comm)0x44000001),iysize/bs,iyfirst/bs,1,&iyy);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),426,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
427 }
428 } else {
429 ixx = ix;
430 iyy = iy;
431 xx = x;
432 if (ycommsize == 1) {ierr = VecCreateMPIWithArray(bigcomm,1,ylen,PETSC_DECIDE-1,NULL((void*)0),&yy);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),432,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;} else yy = y;
433 }
434
435 /* Now it is ready to build SF with preprocessed (xx, yy) and (ixx, iyy) */
436 ierr = ISGetIndices(ixx,&xindices);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),436,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
27
1st function call argument is an uninitialized value
437 ierr = ISGetIndices(iyy,&yindices);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),437,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
438
439 if (xcommsize > 1) {
440 /* PtoP or PtoS */
441 ierr = VecGetLayout(xx,&xlayout);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),441,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
442 ierr = VecGetOwnershipRange(xx,&xstart,NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),442,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
443 ierr = VecGetLayout(yy,&ylayout);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),443,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
444 ierr = VecGetOwnershipRange(yy,&ystart,NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),444,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
445
446 /* Each process has a set of global index pairs (i, j) to scatter xx[i] to yy[j]. We first shift (i, j) to owner process of i through a tmp SF */
447 ierr = VecGetLocalSize(xx,&nroots);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),447,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
448 ierr = ISGetLocalSize(ixx,&nleaves);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),448,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
449 ierr = PetscMalloc2(nleaves,&iremote,nleaves*2,&leafdata)PetscMallocA(2,PETSC_FALSE,449,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,(size_t)(nleaves)*sizeof(**(&iremote)),(&iremote),(size_t
)(nleaves*2)*sizeof(**(&leafdata)),(&leafdata))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),449,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
450
451 for (i=0; i<nleaves; i++) {
452 ierr = PetscLayoutFindOwnerIndex(xlayout,xindices[i],&iremote[i].rank,&iremote[i].index);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),452,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
453 leafdata[2*i] = xindices[i];
454 leafdata[2*i+1] = (ycommsize > 1)? yindices[i] : yindices[i] + ystart;
455 }
456
457 ierr = PetscSFCreate(xcomm,&tmpsf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),457,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
458 ierr = PetscSFSetGraph(tmpsf,nroots,nleaves,NULL((void*)0),PETSC_USE_POINTER,iremote,PETSC_USE_POINTER);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),458,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
459
460 ierr = PetscSFComputeDegreeBegin(tmpsf,&degree);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),460,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
461 ierr = PetscSFComputeDegreeEnd(tmpsf,&degree);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),461,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
462
463 for (i=0; i<nroots; i++) inedges += degree[i];
464 ierr = PetscMalloc1(inedges*2,&rootdata)PetscMallocA(1,PETSC_FALSE,464,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,(size_t)(inedges*2)*sizeof(**(&rootdata)),(&rootdata
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),464,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
465 ierr = PetscSFGatherBegin(tmpsf,MPIU_2INT((MPI_Datatype)0x4c000816),leafdata,rootdata);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),465,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
466 ierr = PetscSFGatherEnd(tmpsf,MPIU_2INT((MPI_Datatype)0x4c000816),leafdata,rootdata);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),466,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
467
468 ierr = PetscFree2(iremote,leafdata)PetscFreeA(2,468,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,&(iremote),&(leafdata))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),468,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
469 ierr = PetscSFDestroy(&tmpsf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),469,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
470
471 /* rootdata contains global index pairs (i, j). i's are owned by the current process, but j's can point to anywhere.
472 We convert i to local, and convert j to (rank, index). In the end, we get an StoP suitable for building SF.
473 */
474 nleaves = inedges;
475 ierr = VecGetLocalSize(yy,&nroots);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),475,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
476 ierr = PetscMalloc1(nleaves,&ilocal)PetscMallocA(1,PETSC_FALSE,476,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,(size_t)(nleaves)*sizeof(**(&ilocal)),(&ilocal))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),476,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
477 ierr = PetscMalloc1(nleaves,&iremote)PetscMallocA(1,PETSC_FALSE,477,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,(size_t)(nleaves)*sizeof(**(&iremote)),(&iremote))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),477,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
478
479 for (i=0; i<inedges; i++) {
480 ilocal[i] = rootdata[2*i] - xstart; /* covert x's global index to local index */
481 ierr = PetscLayoutFindOwnerIndex(ylayout,rootdata[2*i+1],&iremote[i].rank,&iremote[i].index);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),481,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
; /* convert y's global index to (rank, index) */
482 }
483
484 /* MUST build SF on yy's comm, which is not necessarily identical to xx's comm.
485 In SF's view, yy contains the roots (i.e., the remote) and iremote[].rank are ranks in yy's comm.
486 xx contains leaves, which are local and can be thought as part of PETSC_COMM_SELF. */
487 ierr = PetscSFCreate(PetscObjectComm((PetscObject)yy),&data->sf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),487,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
488 ierr = PetscSFSetGraph(data->sf,nroots,nleaves,ilocal,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),488,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
489 ierr = PetscFree(rootdata)((*PetscTrFree)((void*)(rootdata),489,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
) || ((rootdata) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),489,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
490 } else {
491 /* StoP or StoS */
492 ierr = VecGetLayout(yy,&ylayout);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),492,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
493 ierr = ISGetLocalSize(ixx,&nleaves);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),493,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
494 ierr = VecGetLocalSize(yy,&nroots);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),494,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
495 ierr = PetscMalloc1(nleaves,&ilocal)PetscMallocA(1,PETSC_FALSE,495,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,(size_t)(nleaves)*sizeof(**(&ilocal)),(&ilocal))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),495,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
496 ierr = PetscMalloc1(nleaves,&iremote)PetscMallocA(1,PETSC_FALSE,496,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,(size_t)(nleaves)*sizeof(**(&iremote)),(&iremote))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),496,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
497 ierr = PetscArraycpy(ilocal,xindices,nleaves)((sizeof(*(ilocal)) != sizeof(*(xindices))) || PetscMemcpy(ilocal
,xindices,(nleaves)*sizeof(*(ilocal))));
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),497,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
498 for (i=0; i<nleaves; i++) {ierr = PetscLayoutFindOwnerIndex(ylayout,yindices[i],&iremote[i].rank,&iremote[i].index);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),498,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
499 ierr = PetscSFCreate(PetscObjectComm((PetscObject)yy),&data->sf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),499,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
500 ierr = PetscSFSetGraph(data->sf,nroots,nleaves,ilocal,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),500,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
501 }
502
503 /* Free memory no longer needed */
504 ierr = ISRestoreIndices(ixx,&xindices);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),504,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
505 ierr = ISRestoreIndices(iyy,&yindices);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),505,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
506 if (can_do_block_opt) {
507 ierr = VecDestroy(&xx);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),507,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
508 ierr = VecDestroy(&yy);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),508,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
509 ierr = ISDestroy(&ixx);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),509,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
510 ierr = ISDestroy(&iyy);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),510,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
511 } else if (ycommsize == 1) {
512 ierr = VecDestroy(&yy);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),512,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
513 }
514 if (!vscat->from_is) {ierr = ISDestroy(&ix);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),514,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
515 if (!vscat->to_is ) {ierr = ISDestroy(&iy);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),515,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
516
517 /* Create lsf, the local scatter. Could use PetscSFCreateEmbeddedLeafSF, but since we know the comm is PETSC_COMM_SELF, we can make it fast */
518 ierr = PetscObjectGetComm((PetscObject)data->sf,&comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),518,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
519 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),519,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
520 ierr = MPI_Comm_rank(comm,&myrank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),520,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
521
522 /* Find out local edges and build a local SF */
523 {
524 const PetscInt *ilocal;
525 const PetscSFNode *iremote;
526 ierr = PetscSFGetGraph(data->sf,&nroots,&nleaves,&ilocal,&iremote);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),526,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
527 for (i=lnleaves=0; i<nleaves; i++) {if (iremote[i].rank == (PetscInt)myrank) lnleaves++;}
528 ierr = PetscMalloc1(lnleaves,&lilocal)PetscMallocA(1,PETSC_FALSE,528,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,(size_t)(lnleaves)*sizeof(**(&lilocal)),(&lilocal))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),528,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
529 ierr = PetscMalloc1(lnleaves,&liremote)PetscMallocA(1,PETSC_FALSE,529,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,(size_t)(lnleaves)*sizeof(**(&liremote)),(&liremote)
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),529,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
530
531 for (i=j=0; i<nleaves; i++) {
532 if (iremote[i].rank == (PetscInt)myrank) {
533 lilocal[j] = ilocal? ilocal[i] : i; /* ilocal=NULL for contiguous storage */
534 liremote[j].rank = 0; /* rank in PETSC_COMM_SELF */
535 liremote[j].index = iremote[i].index;
536 j++;
537 }
538 }
539 ierr = PetscSFCreate(PETSC_COMM_SELF((MPI_Comm)0x44000001),&data->lsf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),539,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
540 ierr = PetscSFSetGraph(data->lsf,nroots,lnleaves,lilocal,PETSC_OWN_POINTER,liremote,PETSC_OWN_POINTER);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),540,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
541 }
542
543 /* vecscatter uses eager setup */
544 ierr = PetscSFSetUp(data->sf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),544,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
545 ierr = PetscSFSetUp(data->lsf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),545,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
546
547 vscat->data = (void*)data;
548 vscat->ops->begin = VecScatterBegin_SF;
549 vscat->ops->end = VecScatterEnd_SF;
550 vscat->ops->remap = VecScatterRemap_SF;
551 vscat->ops->copy = VecScatterCopy_SF;
552 vscat->ops->destroy = VecScatterDestroy_SF;
553 vscat->ops->view = VecScatterView_SF;
554 vscat->ops->getremotecount = VecScatterGetRemoteCount_SF;
555 vscat->ops->getremote = VecScatterGetRemote_SF;
556 vscat->ops->getremoteordered = VecScatterGetRemoteOrdered_SF;
557 vscat->ops->restoreremote = VecScatterRestoreRemote_SF;
558 vscat->ops->restoreremoteordered = VecScatterRestoreRemoteOrdered_SF;
559 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
560}
561
562PetscErrorCode VecScatterCreate_SF(VecScatter ctx)
563{
564 PetscErrorCode ierr;
565
566 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
; petscstack->line[petscstack->currentsize] = 566; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
567 ctx->ops->setup = VecScatterSetUp_SF;
568 ierr = PetscObjectChangeTypeName((PetscObject)ctx,VECSCATTERSF"sf");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),568,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
569 ierr = PetscInfo(ctx,"Using StarForest for vector scatter\n")PetscInfo_Private(__func__,ctx,"Using StarForest for vector scatter\n"
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),569,__func__,"/sandbox/petsc/petsc.next/src/vec/vscat/impls/sf/vscatsf.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
570 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
571}