Actual source code: pvecimpl.h

#ifndef PETSC_PVECIMPL_H
#define PETSC_PVECIMPL_H

#include <../src/vec/vec/impls/dvecimpl.h>

typedef struct {
  PetscInt insertmode;
  PetscInt count;
  PetscInt bcount;
} VecAssemblyHeader;

typedef struct {
  PetscInt    *ints;
  PetscInt    *intb;
  PetscScalar *scalars;
  PetscScalar *scalarb;
  char         pendings;
  char         pendingb;
} VecAssemblyFrame;
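
/* Reading of the two structs above (inferred from the field names and the
   assembly routines declared later in this header, not stated in the source):
   a VecAssemblyHeader is exchanged per communicating rank during
   VecAssemblyBegin/End_MPI(), carrying the insert mode plus the number of
   stashed unblocked (count) and blocked (bcount) entries; a VecAssemblyFrame
   holds the matching pointers to the stashed indices/values ('b' suffix =
   blocked) and flags for sends still pending. */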

typedef struct {
  VECHEADER
  PetscInt   nghost;      /* number of ghost points on this process */
  Vec        localrep;    /* local representation of vector */
  VecScatter localupdate; /* scatter to update ghost values */

  PetscBool          assembly_subset;     /* Subsequent assemblies will set a subset (perhaps equal) of off-process entries set on first assembly */
  PetscBool          first_assembly_done; /* Has the first assembly been done? */
  PetscBool          use_status;          /* Use MPI_Status to determine number of items in each message */
  PetscMPIInt        nsendranks;
  PetscMPIInt        nrecvranks;
  PetscMPIInt       *sendranks;
  PetscMPIInt       *recvranks;
  VecAssemblyHeader *sendhdr, *recvhdr;
  VecAssemblyFrame  *sendptrs; /* pointers to the main messages */
  MPI_Request       *sendreqs;
  MPI_Request       *recvreqs;
  PetscSegBuffer     segrecvint;
  PetscSegBuffer     segrecvscalar;
  PetscSegBuffer     segrecvframe;
#if defined(PETSC_HAVE_NVSHMEM)
  PetscBool use_nvshmem; /* Try to use NVSHMEM in communication of, for example, VecNorm */
#endif

  /* COO fields, assuming m is the vector's local size */
  PetscCount  coo_n;
  PetscCount  tot1;  /* Total local entries in COO arrays */
  PetscCount *jmap1; /* [m+1]: i-th entry of the vector has jmap1[i+1]-jmap1[i] repeats in COO arrays */
  PetscCount *perm1; /* [tot1]: permutation array for local entries */

  PetscCount  nnz2;  /* Unique entries in recvbuf */
  PetscCount *imap2; /* [nnz2]: i-th unique entry in recvbuf is the imap2[i]-th entry of the vector */
  PetscCount *jmap2; /* [nnz2+1] */
  PetscCount *perm2; /* [recvlen] */

  PetscSF      coo_sf;
  PetscCount   sendlen, recvlen; /* Lengths (in units of PetscScalar) of sendbuf/recvbuf */
  PetscCount  *Cperm;            /* [sendlen]: permutation array to fill sendbuf[]; 'C' for communication */
  PetscScalar *sendbuf, *recvbuf; /* Buffers for remote values in VecSetValuesCOO() */
} Vec_MPI;
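
/* A minimal sketch (not the actual implementation) of how the COO fields above
   would be applied in VecSetValuesCOO_MPI() with ADD_VALUES, assuming v[] is
   the user-supplied value array, xv[] the local vector array, and m the local
   size; jmap2/perm2 are read as mirroring the documented jmap1/perm1 convention:

     for (PetscCount i = 0; i < m; i++)    // local COO entries
       for (PetscCount k = jmap1[i]; k < jmap1[i + 1]; k++) xv[i] += v[perm1[k]];
     for (PetscCount i = 0; i < nnz2; i++) // entries received from other ranks
       for (PetscCount k = jmap2[i]; k < jmap2[i + 1]; k++) xv[imap2[i]] += recvbuf[perm2[k]];
*/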

PETSC_INTERN PetscErrorCode VecDot_MPI(Vec, Vec, PetscScalar *);
PETSC_INTERN PetscErrorCode VecMDot_MPI(Vec, PetscInt, const Vec[], PetscScalar *);
PETSC_INTERN PetscErrorCode VecTDot_MPI(Vec, Vec, PetscScalar *);
PETSC_INTERN PetscErrorCode VecMTDot_MPI(Vec, PetscInt, const Vec[], PetscScalar *);
PETSC_INTERN PetscErrorCode VecNorm_MPI(Vec, NormType, PetscReal *);
PETSC_INTERN PetscErrorCode VecMax_MPI(Vec, PetscInt *, PetscReal *);
PETSC_INTERN PetscErrorCode VecMin_MPI(Vec, PetscInt *, PetscReal *);
PETSC_INTERN PetscErrorCode VecDestroy_MPI(Vec);
PETSC_INTERN PetscErrorCode VecView_MPI_Binary(Vec, PetscViewer);
PETSC_INTERN PetscErrorCode VecView_MPI_Draw_LG(Vec, PetscViewer);
PETSC_INTERN PetscErrorCode VecView_MPI_Socket(Vec, PetscViewer);
PETSC_INTERN PetscErrorCode VecView_MPI_HDF5(Vec, PetscViewer);
PETSC_INTERN PetscErrorCode VecView_MPI_ADIOS(Vec, PetscViewer);
PETSC_EXTERN PetscErrorCode VecView_MPI(Vec, PetscViewer);
PETSC_INTERN PetscErrorCode VecGetSize_MPI(Vec, PetscInt *);
PETSC_INTERN PetscErrorCode VecGetValues_MPI(Vec, PetscInt, const PetscInt[], PetscScalar[]);
PETSC_INTERN PetscErrorCode VecSetValues_MPI(Vec, PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
PETSC_INTERN PetscErrorCode VecSetValuesBlocked_MPI(Vec, PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
PETSC_INTERN PetscErrorCode VecAssemblyBegin_MPI(Vec);
PETSC_INTERN PetscErrorCode VecAssemblyEnd_MPI(Vec);
PETSC_INTERN PetscErrorCode VecAssemblyReset_MPI(Vec);
PETSC_INTERN PetscErrorCode VecCreate_MPI_Private(Vec, PetscBool, PetscInt, const PetscScalar[]);
PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec);
PETSC_INTERN PetscErrorCode VecPlaceArray_MPI(Vec, const PetscScalar *);
PETSC_INTERN PetscErrorCode VecDuplicate_MPI(Vec, Vec *);
PETSC_INTERN PetscErrorCode VecResetArray_MPI(Vec);
PETSC_INTERN PetscErrorCode VecSetPreallocationCOO_MPI(Vec, PetscCount, const PetscInt[]);
PETSC_INTERN PetscErrorCode VecSetValuesCOO_MPI(Vec, const PetscScalar[], InsertMode);
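
/* The two COO routines above back the public VecSetPreallocationCOO() /
   VecSetValuesCOO() interface for VECMPI vectors; a minimal usage sketch:

     Vec         x;
     PetscInt    coo_i[] = {0, 1, 1}; // with ADD_VALUES, repeated indices are summed
     PetscScalar coo_v[] = {1.0, 2.0, 3.0};

     PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
     PetscCall(VecSetSizes(x, PETSC_DECIDE, 10));
     PetscCall(VecSetType(x, VECMPI));
     PetscCall(VecSetPreallocationCOO(x, 3, coo_i));
     PetscCall(VecSetValuesCOO(x, coo_v, ADD_VALUES)); // x[1] receives 2.0 + 3.0
     PetscCall(VecDestroy(&x));
*/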

static inline PetscErrorCode VecMXDot_MPI_Default(Vec xin, PetscInt nv, const Vec y[], PetscScalar *z, PetscErrorCode (*VecMXDot_SeqFn)(Vec, PetscInt, const Vec[], PetscScalar *))
{
  PetscFunctionBegin;
  PetscCall(VecMXDot_SeqFn(xin, nv, y, z));
  PetscCall(MPIU_Allreduce(MPI_IN_PLACE, z, nv, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)xin)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static inline PetscErrorCode VecXDot_MPI_Default(Vec xin, Vec yin, PetscScalar *z, PetscErrorCode (*VecXDot_SeqFn)(Vec, Vec, PetscScalar *))
{
  PetscFunctionBegin;
  PetscCall(VecXDot_SeqFn(xin, yin, z));
  PetscCall(MPIU_Allreduce(MPI_IN_PLACE, z, 1, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)xin)));
  PetscFunctionReturn(PETSC_SUCCESS);
}
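
/* The MPI dot products declared earlier are thin wrappers over these helpers:
   compute the local contribution with the sequential kernel, then finish with
   a single MPIU_SUM reduction. Roughly (a sketch, not the verbatim source):

     PetscErrorCode VecDot_MPI(Vec xin, Vec yin, PetscScalar *z)
     {
       PetscFunctionBegin;
       PetscCall(VecXDot_MPI_Default(xin, yin, z, VecDot_Seq));
       PetscFunctionReturn(PETSC_SUCCESS);
     }
*/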

static inline PetscErrorCode VecMinMax_MPI_Default(Vec xin, PetscInt *idx, PetscReal *z, PetscErrorCode (*VecMinMax_SeqFn)(Vec, PetscInt *, PetscReal *), const MPI_Op ops[2])
{
  PetscFunctionBegin;
  /* Find the local min or max */
  PetscCall(VecMinMax_SeqFn(xin, idx, z));
  if (PetscDefined(HAVE_MPIUNI)) PetscFunctionReturn(PETSC_SUCCESS);
  /* Find the global min or max */
  if (idx) {
    struct {
      PetscReal v;
      PetscInt  i;
    } in = {*z, *idx + xin->map->rstart};

    PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &in, 1, MPIU_REAL_INT, ops[0], PetscObjectComm((PetscObject)xin)));
    *z   = in.v;
    *idx = in.i;
  } else {
    /* The user does not need idx */
    PetscCall(MPIU_Allreduce(MPI_IN_PLACE, z, 1, MPIU_REAL, ops[1], PetscObjectComm((PetscObject)xin)));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
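
/* ops[] selects the two reductions: ops[0] combines (value, global index)
   pairs on MPIU_REAL_INT, while ops[1] reduces plain values when the caller
   passes idx == NULL and so skips the index bookkeeping. For a max the pair
   would be something like {MPIU_MAXLOC, MPIU_MAX}; for a min,
   {MPIU_MINLOC, MPIU_MIN}. Note the local index is shifted by rstart above so
   the reduction operates on global indices. */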

static inline PetscErrorCode VecDotNorm2_MPI_Default(Vec s, Vec t, PetscScalar *dp, PetscScalar *nm, PetscErrorCode (*VecDotNorm2_SeqFn)(Vec, Vec, PetscScalar *, PetscScalar *))
{
  PetscFunctionBegin;
  PetscCall(VecDotNorm2_SeqFn(s, t, dp, nm));
  {
    PetscScalar sum[] = {*dp, *nm};

    PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &sum, 2, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)s)));
    *dp = sum[0];
    *nm = sum[1];
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
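
/* Design note: fusing the dot product and the squared norm into one length-2
   MPIU_Allreduce costs a single global synchronization instead of the two that
   separate VecDot() and VecNorm() calls would incur, which is the point of
   VecDotNorm2() for Krylov methods that need both quantities each iteration. */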

static inline PetscErrorCode VecNorm_MPI_Default(Vec xin, NormType type, PetscReal *z, PetscErrorCode (*VecNorm_SeqFn)(Vec, NormType, PetscReal *))
{
  PetscMPIInt zn = 1;
  MPI_Op      op = MPIU_SUM;

  PetscFunctionBegin;
  PetscCall(VecNorm_SeqFn(xin, type, z));
  switch (type) {
  case NORM_1_AND_2:
    // the 2-norm needs to be squared below before being summed; NORM_2 stores the norm in the
    // first slot while NORM_1_AND_2 stores it in the second
    z[1] *= z[1];
    zn = 2;
    break;
  case NORM_2:
    z[0] *= z[0]; // fall through
  case NORM_1:
  case NORM_FROBENIUS:
    break;
  case NORM_INFINITY:
    op = MPIU_MAX;
    break;
  }
  PetscCall(MPIU_Allreduce(MPI_IN_PLACE, z, zn, MPIU_REAL, op, PetscObjectComm((PetscObject)xin)));
  if (type == NORM_2 || type == NORM_FROBENIUS || type == NORM_1_AND_2) z[type == NORM_1_AND_2] = PetscSqrtReal(z[type == NORM_1_AND_2]);
  PetscFunctionReturn(PETSC_SUCCESS);
}
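
/* Example of the NORM_1_AND_2 convention handled above: the caller passes a
   two-entry array, and only the 2-norm slot is squared before and
   square-rooted after the reduction:

     PetscReal nrm[2];
     PetscCall(VecNorm(x, NORM_1_AND_2, nrm)); // nrm[0] = 1-norm, nrm[1] = 2-norm
*/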

#endif // PETSC_PVECIMPL_H