Bug Summary

File: mat/interface/matrix.c
Warning: line 9210, column 7
Value stored to 'ierr' is never read

Annotated Source Code


1
2/*
3 This is where the abstract matrix operations are defined
4*/
5
6#include <petsc/private/matimpl.h> /*I "petscmat.h" I*/
7#include <petsc/private/isimpl.h>
8#include <petsc/private/vecimpl.h>
9
10/* Logging support */
11PetscClassId MAT_CLASSID;
12PetscClassId MAT_COLORING_CLASSID;
13PetscClassId MAT_FDCOLORING_CLASSID;
14PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
15
16PetscLogEvent MAT_Mult, MAT_Mults, MAT_MultConstrained, MAT_MultAdd, MAT_MultTranspose;
17PetscLogEvent MAT_MultTransposeConstrained, MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve,MAT_MatTrSolve;
18PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
19PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
20PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
21PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
22PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
23PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply,MAT_Transpose,MAT_FDColoringFunction, MAT_CreateSubMat;
24PetscLogEvent MAT_TransposeColoringCreate;
25PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
26PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric,MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
27PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
28PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
29PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
30PetscLogEvent MAT_MultHermitianTranspose,MAT_MultHermitianTransposeAdd;
31PetscLogEvent MAT_Getsymtranspose, MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
32PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
33PetscLogEvent MAT_Applypapt, MAT_Applypapt_numeric, MAT_Applypapt_symbolic, MAT_GetSequentialNonzeroStructure;
34PetscLogEvent MAT_GetMultiProcBlock;
35PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_SetValuesBatch;
36PetscLogEvent MAT_ViennaCLCopyToGPU;
37PetscLogEvent MAT_Merge,MAT_Residual,MAT_SetRandom;
38PetscLogEvent MATCOLORING_Apply,MATCOLORING_Comm,MATCOLORING_Local,MATCOLORING_ISCreate,MATCOLORING_SetUp,MATCOLORING_Weights;
39
40const char *const MatFactorTypes[] = {"NONE","LU","CHOLESKY","ILU","ICC","ILUDT","MatFactorType","MAT_FACTOR_",0};
41
42/*@
43 MatSetRandom - Sets all components of a matrix to random numbers. For sparse matrices that have been preallocated but not yet assembled, it randomly selects appropriate locations to fill.
44
45 Logically Collective on Mat
46
47 Input Parameters:
48+ x - the matrix
49- rctx - the random number context, formed by PetscRandomCreate(), or NULL,
50 in which case one will be created internally.
51
52 Output Parameter:
53. x - the matrix
54
55 Example of Usage:
56.vb
57 PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
58 MatSetRandom(x,rctx);
59 PetscRandomDestroy(&rctx);
60.ve
61
62 Level: intermediate
63
64
65.seealso: MatZeroEntries(), MatSetValues(), PetscRandomCreate(), PetscRandomDestroy()
66@*/
67PetscErrorCode MatSetRandom(Mat x,PetscRandom rctx)
68{
69 PetscErrorCode ierr;
70 PetscRandom randObj = NULL;
71
72 PetscFunctionBegin;
73 PetscValidHeaderSpecific(x,MAT_CLASSID,1);
74 if (rctx) PetscValidHeaderSpecific(rctx,PETSC_RANDOM_CLASSID,2);
75 PetscValidType(x,1);
76
77 if (!x->ops->setrandom) SETERRQ1(PetscObjectComm((PetscObject)x),PETSC_ERR_SUP,"Mat type %s",((PetscObject)x)->type_name);
78
79 if (!rctx) {
80 MPI_Comm comm;
81 ierr = PetscObjectGetComm((PetscObject)x,&comm);CHKERRQ(ierr);
82 ierr = PetscRandomCreate(comm,&randObj);CHKERRQ(ierr);
83 ierr = PetscRandomSetFromOptions(randObj);CHKERRQ(ierr);
84 rctx = randObj;
85 }
86
87 ierr = PetscLogEventBegin(MAT_SetRandom,x,rctx,0,0);CHKERRQ(ierr);
88 ierr = (*x->ops->setrandom)(x,rctx);CHKERRQ(ierr);
89 ierr = PetscLogEventEnd(MAT_SetRandom,x,rctx,0,0);CHKERRQ(ierr);
90
91 ierr = MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
92 ierr = MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
93 ierr = PetscRandomDestroy(&randObj);CHKERRQ(ierr);
94 PetscFunctionReturn(0);
95}
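
A slightly fuller version of the usage example from the manual page above, with error checking in the style of this file; it assumes a Mat A that has already been created and preallocated:

.vb
  PetscErrorCode ierr;
  PetscRandom    rctx;

  ierr = PetscRandomCreate(PETSC_COMM_WORLD,&rctx);CHKERRQ(ierr);
  ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
  ierr = MatSetRandom(A,rctx);CHKERRQ(ierr);   /* also assembles A, per the body above */
  ierr = PetscRandomDestroy(&rctx);CHKERRQ(ierr);
.ve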
96
97/*@
98 MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in
99
100 Logically Collective on Mat
101
102 Input Parameter:
103. mat - the factored matrix
104
105 Output Parameters:
106+ pivot - the pivot value computed
107- row - the row in which the zero pivot occurred. Note that this row must be interpreted carefully due to row reorderings and which processes
108 share the matrix
109
110 Level: advanced
111
112 Notes:
113 This routine does not work for factorizations done with external packages.
114 This routine should only be called if MatGetFactorError() returns a value of MAT_FACTOR_NUMERIC_ZEROPIVOT
115
116 This can also be called on non-factored matrices, for example matrices used in SOR.
117
118.seealso: MatZeroEntries(), MatFactor(), MatGetFactor(), MatFactorSymbolic(), MatFactorClearError(), MatFactorGetError()
119@*/
120PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat,PetscReal *pivot,PetscInt *row)
121{
122 PetscFunctionBegin;
123 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
124 *pivot = mat->factorerror_zeropivot_value;
125 *row = mat->factorerror_zeropivot_row;
126 PetscFunctionReturn(0);
127}
128
129/*@
130 MatFactorGetError - gets the error code from a factorization
131
132 Logically Collective on Mat
133
134 Input Parameter:
135. mat - the factored matrix
136
137 Output Parameter:
138. err - the error code
139
140 Level: advanced
141
142 Notes:
143 This can also be called on non-factored matrices, for example matrices used in SOR.
144
145.seealso: MatZeroEntries(), MatFactor(), MatGetFactor(), MatFactorSymbolic(), MatFactorClearError(), MatFactorGetErrorZeroPivot()
146@*/
147PetscErrorCode MatFactorGetError(Mat mat,MatFactorError *err)
148{
149 PetscFunctionBegin;
150 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
151 *err = mat->factorerrortype;
152 PetscFunctionReturn(0);
153}
154
155/*@
156 MatFactorClearError - clears the error code in a factorization
157
158 Logically Collective on Mat
159
160 Input Parameter:
161. mat - the factored matrix
162
163 Level: developer
164
165 Notes:
166 This can also be called on non-factored matrices, for example matrices used in SOR.
167
168.seealso: MatZeroEntries(), MatFactor(), MatGetFactor(), MatFactorSymbolic(), MatFactorGetError(), MatFactorGetErrorZeroPivot()
169@*/
170PetscErrorCode MatFactorClearError(Mat mat)
171{
172 PetscFunctionBegin;
173 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
174 mat->factorerrortype = MAT_FACTOR_NOERROR;
175 mat->factorerror_zeropivot_value = 0.0;
176 mat->factorerror_zeropivot_row = 0;
177 PetscFunctionReturn(0);
178}
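
A sketch of how MatFactorGetError(), MatFactorGetErrorZeroPivot() and MatFactorClearError() fit together when driving an LU factorization by hand; the matrix A, the built-in PETSc solver, and the nested-dissection ordering are illustrative assumptions, not taken from this file:

.vb
  Mat            F;
  MatFactorInfo  info;
  MatFactorError err;
  IS             rowperm,colperm;
  PetscReal      pivot;
  PetscInt       row;

  ierr = MatGetFactor(A,MATSOLVERPETSC,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
  ierr = MatGetOrdering(A,MATORDERINGND,&rowperm,&colperm);CHKERRQ(ierr);
  ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
  ierr = MatLUFactorSymbolic(F,A,rowperm,colperm,&info);CHKERRQ(ierr);
  ierr = MatLUFactorNumeric(F,A,&info);CHKERRQ(ierr);
  ierr = MatFactorGetError(F,&err);CHKERRQ(ierr);
  if (err == MAT_FACTOR_NUMERIC_ZEROPIVOT) {
    ierr = MatFactorGetErrorZeroPivot(F,&pivot,&row);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"zero pivot %g in row %D\n",(double)pivot,row);CHKERRQ(ierr);
    ierr = MatFactorClearError(F);CHKERRQ(ierr);
  }
  ierr = ISDestroy(&rowperm);CHKERRQ(ierr);
  ierr = ISDestroy(&colperm);CHKERRQ(ierr);
  ierr = MatDestroy(&F);CHKERRQ(ierr);
.ve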
179
180PETSC_INTERN PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat,PetscBool cols,PetscReal tol,IS *nonzero)
181{
182 PetscErrorCode ierr;
183 Vec r,l;
184 const PetscScalar *al;
185 PetscInt i,nz,gnz,N,n;
186
187 PetscFunctionBegin;
188 ierr = MatCreateVecs(mat,&r,&l);CHKERRQ(ierr);
189 if (!cols) { /* nonzero rows */
190 ierr = MatGetSize(mat,&N,NULL);CHKERRQ(ierr);
191 ierr = MatGetLocalSize(mat,&n,NULL);CHKERRQ(ierr);
192 ierr = VecSet(l,0.0);CHKERRQ(ierr);
193 ierr = VecSetRandom(r,NULL);CHKERRQ(ierr);
194 ierr = MatMult(mat,r,l);CHKERRQ(ierr);
195 ierr = VecGetArrayRead(l,&al);CHKERRQ(ierr);
196 } else { /* nonzero columns */
197 ierr = MatGetSize(mat,NULL,&N);CHKERRQ(ierr);
198 ierr = MatGetLocalSize(mat,NULL,&n);CHKERRQ(ierr);
199 ierr = VecSet(r,0.0);CHKERRQ(ierr);
200 ierr = VecSetRandom(l,NULL);CHKERRQ(ierr);
201 ierr = MatMultTranspose(mat,l,r);CHKERRQ(ierr);
202 ierr = VecGetArrayRead(r,&al);CHKERRQ(ierr);
203 }
204 if (tol <= 0.0) { for (i=0,nz=0;i<n;i++) if (al[i] != 0.0) nz++; }
205 else { for (i=0,nz=0;i<n;i++) if (PetscAbsScalar(al[i]) > tol) nz++; }
206 ierr = MPIU_Allreduce(&nz,&gnz,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
207 if (gnz != N) {
208 PetscInt *nzr;
209 ierr = PetscMalloc1(nz,&nzr);CHKERRQ(ierr);
210 if (nz) {
211 if (tol < 0) { for (i=0,nz=0;i<n;i++) if (al[i] != 0.0) nzr[nz++] = i; }
212 else { for (i=0,nz=0;i<n;i++) if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i; }
213 }
214 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)mat),nz,nzr,PETSC_OWN_POINTER,nonzero);CHKERRQ(ierr);
215 } else *nonzero = NULL;
216 if (!cols) { /* nonzero rows */
217 ierr = VecRestoreArrayRead(l,&al);CHKERRQ(ierr);
218 } else {
219 ierr = VecRestoreArrayRead(r,&al);CHKERRQ(ierr);
220 }
221 ierr = VecDestroy(&l);CHKERRQ(ierr);
222 ierr = VecDestroy(&r);CHKERRQ(ierr);
223 PetscFunctionReturn(0);
224}
225
226/*@
227 MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
228
229 Input Parameter:
230. mat - the matrix
231
232 Output Parameter:
233. keptrows - the rows that are not completely zero
234
235 Notes:
236 keptrows is set to NULL if all rows are nonzero.
237
238 Level: intermediate
239
240 @*/
241PetscErrorCode MatFindNonzeroRows(Mat mat,IS *keptrows)
242{
243 PetscErrorCode ierr;
244
245 PetscFunctionBegin;
246 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
247 PetscValidType(mat,1);
248 PetscValidPointer(keptrows,2);
249 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
250 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
251 if (!mat->ops->findnonzerorows) {
252 ierr = MatFindNonzeroRowsOrCols_Basic(mat,PETSC_FALSE,0.0,keptrows);CHKERRQ(ierr);
253 } else {
254 ierr = (*mat->ops->findnonzerorows)(mat,keptrows);CHKERRQ(ierr);
255 }
256 PetscFunctionReturn(0);
257}
258
259/*@
260 MatFindZeroRows - Locate all rows that are completely zero in the matrix
261
262 Input Parameter:
263. mat - the matrix
264
265 Output Parameter:
266. zerorows - the rows that are completely zero
267
268 Notes:
269 zerorows is set to NULL if no rows are zero.
270
271 Level: intermediate
272
273 @*/
274PetscErrorCode MatFindZeroRows(Mat mat,IS *zerorows)
275{
276 PetscErrorCode ierr;
277 IS keptrows;
278 PetscInt m, n;
279 PetscFunctionBegin;
280 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
281 PetscValidType(mat,1);
282
283 ierr = MatFindNonzeroRows(mat, &keptrows);CHKERRQ(ierr);
284 /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
285 In keeping with this convention, we set zerorows to NULL if there are no zero
286 rows. */
287 if (keptrows == NULL) {
288 *zerorows = NULL;
289 } else {
290 ierr = MatGetOwnershipRange(mat,&m,&n);CHKERRQ(ierr);
291 ierr = ISComplement(keptrows,m,n,zerorows);CHKERRQ(ierr);
292 ierr = ISDestroy(&keptrows);CHKERRQ(ierr);
293 }
294 PetscFunctionReturn(0);
295}
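
A usage sketch for MatFindNonzeroRows() and MatFindZeroRows() on an assembled Mat A, observing the NULL conventions documented above (either output may come back NULL rather than as an empty IS):

.vb
  IS nonzerorows = NULL,zerorows = NULL;

  ierr = MatFindNonzeroRows(A,&nonzerorows);CHKERRQ(ierr);
  ierr = MatFindZeroRows(A,&zerorows);CHKERRQ(ierr);
  if (zerorows) {   /* NULL means there are no zero rows at all */
    ierr = ISView(zerorows,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  ierr = ISDestroy(&nonzerorows);CHKERRQ(ierr);   /* ISDestroy() accepts a NULL IS */
  ierr = ISDestroy(&zerorows);CHKERRQ(ierr);
.ve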
296
297/*@
298 MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
299
300 Not Collective
301
302 Input Parameter:
303. A - the matrix
304
305 Output Parameter:
306. a - the diagonal part (which is a SEQUENTIAL matrix)
307
308 Notes:
309 See the manual page for MatCreateAIJ() for more information on the "diagonal part" of the matrix.
310 Use caution, as the reference count on the returned matrix is not incremented and it is used as
311 part of the containing MPI Mat's normal operation.
312
313 Level: advanced
314
315@*/
316PetscErrorCode MatGetDiagonalBlock(Mat A,Mat *a)
317{
318 PetscErrorCode ierr;
319
320 PetscFunctionBegin;
321 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
322 PetscValidType(A,1);
323 PetscValidPointer(a,2);
324 if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
325 if (!A->ops->getdiagonalblock) {
326 PetscMPIInt size;
327 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
328 if (size == 1) {
329 *a = A;
330 PetscFunctionReturn(0);
331 } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Not coded for this matrix type");
332 }
333 ierr = (*A->ops->getdiagonalblock)(A,a);CHKERRQ(ierr);
334 PetscFunctionReturn(0);
335}
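
A sketch of the caution in the notes above: the returned diagonal block is borrowed, so it may be inspected but must not be destroyed. A is assumed to be an assembled parallel (e.g. MPIAIJ) matrix:

.vb
  Mat Ad;

  ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  ierr = MatView(Ad,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);   /* Ad is a sequential matrix */
  /* no MatDestroy(&Ad): the reference count was not incremented */
.ve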
336
337/*@
338 MatGetTrace - Gets the trace of a matrix, i.e., the sum of the diagonal entries.
339
340 Collective on Mat
341
342 Input Parameter:
343. mat - the matrix
344
345 Output Parameter:
346. trace - the sum of the diagonal entries
347
348 Level: advanced
349
350@*/
351PetscErrorCode MatGetTrace(Mat mat,PetscScalar *trace)
352{
353 PetscErrorCode ierr;
354 Vec diag;
355
356 PetscFunctionBegin;
357 ierr = MatCreateVecs(mat,&diag,NULL);CHKERRQ(ierr);
358 ierr = MatGetDiagonal(mat,diag);CHKERRQ(ierr);
359 ierr = VecSum(diag,trace);CHKERRQ(ierr);
360 ierr = VecDestroy(&diag);CHKERRQ(ierr);
361 PetscFunctionReturn(0);
362}
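
A minimal usage sketch; as the body above shows, MatGetTrace() is just MatGetDiagonal() followed by VecSum(), so A is assumed square and assembled:

.vb
  PetscScalar trace;

  ierr = MatGetTrace(A,&trace);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"trace(A) = %g\n",(double)PetscRealPart(trace));CHKERRQ(ierr);
.ve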
363
364/*@
365 MatRealPart - Zeros out the imaginary part of the matrix
366
367 Logically Collective on Mat
368
369 Input Parameter:
370. mat - the matrix
371
372 Level: advanced
373
374
375.seealso: MatImaginaryPart()
376@*/
377PetscErrorCode MatRealPart(Mat mat)
378{
379 PetscErrorCode ierr;
380
381 PetscFunctionBegin;
382 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
383 PetscValidType(mat,1);
384 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
385 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
386 if (!mat->ops->realpart) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
387 MatCheckPreallocated(mat,1);
388 ierr = (*mat->ops->realpart)(mat);CHKERRQ(ierr);
389#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
390 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
391 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
392 }
393#endif
394 PetscFunctionReturn(0);
395}
396
397/*@C
398 MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
399
400 Collective on Mat
401
402 Input Parameter:
403. mat - the matrix
404
405 Output Parameters:
406+ nghosts - number of ghosts (note for BAIJ matrices there is one ghost for each block)
407- ghosts - the global indices of the ghost points
408
409 Notes:
410 The returned nghosts and ghosts are suitable to pass into VecCreateGhost().
411
412 Level: advanced
413
414@*/
415PetscErrorCode MatGetGhosts(Mat mat,PetscInt *nghosts,const PetscInt *ghosts[])
416{
417 PetscErrorCode ierr;
418
419 PetscFunctionBegin;
420 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
421 PetscValidType(mat,1);
422 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
423 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
424 if (!mat->ops->getghosts) {
425 if (nghosts) *nghosts = 0;
426 if (ghosts) *ghosts = 0;
427 } else {
428 ierr = (*mat->ops->getghosts)(mat,nghosts,ghosts);CHKERRQ(ierr);
429 }
430 PetscFunctionReturn(0);
431}
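
As the note above says, the outputs of MatGetGhosts() can be fed directly to VecCreateGhost(); a sketch, assuming A is an assembled MPIAIJ matrix:

.vb
  PetscInt       nghosts,n,N;
  const PetscInt *ghosts;
  Vec            v;

  ierr = MatGetGhosts(A,&nghosts,&ghosts);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,NULL,&n);CHKERRQ(ierr);
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = VecCreateGhost(PetscObjectComm((PetscObject)A),n,N,nghosts,ghosts,&v);CHKERRQ(ierr);
  /* ... use v, e.g. with VecGhostUpdateBegin()/VecGhostUpdateEnd() ... */
  ierr = VecDestroy(&v);CHKERRQ(ierr);
.ve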
432
433
434/*@
435 MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
436
437 Logically Collective on Mat
438
439 Input Parameter:
440. mat - the matrix
441
442 Level: advanced
443
444
445.seealso: MatRealPart()
446@*/
447PetscErrorCode MatImaginaryPart(Mat mat)
448{
449 PetscErrorCode ierr;
450
451 PetscFunctionBegin;
452 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
453 PetscValidType(mat,1);
454 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
455 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
456 if (!mat->ops->imaginarypart) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
457 MatCheckPreallocated(mat,1);
458 ierr = (*mat->ops->imaginarypart)(mat);CHKERRQ(ierr);
459#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
460 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
461 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
462 }
463#endif
464 PetscFunctionReturn(0);
465}
466
467/*@
468 MatMissingDiagonal - Determine if a sparse matrix is missing a diagonal entry (or block entry for BAIJ matrices)
469
470 Not Collective
471
472 Input Parameter:
473. mat - the matrix
474
475 Output Parameters:
476+ missing - is any diagonal entry missing
477- dd - (optional) the first diagonal entry on this process that is missing
478
479 Level: advanced
480
481
482.seealso: MatRealPart()
483@*/
484PetscErrorCode MatMissingDiagonal(Mat mat,PetscBool *missing,PetscInt *dd)
485{
486 PetscErrorCode ierr;
487
488 PetscFunctionBegin;
489 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
490 PetscValidType(mat,1);
491 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
492 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
493 if (!mat->ops->missingdiagonal) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
494 ierr = (*mat->ops->missingdiagonal)(mat,missing,dd);CHKERRQ(ierr);
495 PetscFunctionReturn(0);
496}
497
498/*@C
499 MatGetRow - Gets a row of a matrix. You MUST call MatRestoreRow()
500 for each row that you get to ensure that your application does
501 not bleed memory.
502
503 Not Collective
504
505 Input Parameters:
506+ mat - the matrix
507- row - the row to get
508
509 Output Parameters:
510+ ncols - if not NULL, the number of nonzeros in the row
511. cols - if not NULL, the column numbers
512- vals - if not NULL, the values
513
514 Notes:
515 This routine is provided for people who need to have direct access
516 to the structure of a matrix. We hope that we provide enough
517 high-level matrix routines that few users will need it.
518
519 MatGetRow() always returns 0-based column indices, regardless of
520 whether the internal representation is 0-based (default) or 1-based.
521
522 For better efficiency, set cols and/or vals to NULL if you do
523 not wish to extract these quantities.
524
525 The user can only examine the values extracted with MatGetRow();
526 the values cannot be altered. To change the matrix entries, one
527 must use MatSetValues().
528
529 You can only have one call to MatGetRow() outstanding for a particular
530 matrix at a time, per processor. MatGetRow() can only obtain rows
531 associated with the given processor; it cannot get rows from the
532 other processors. For that, we suggest using MatCreateSubMatrices(), then
533 MatGetRow() on the submatrix. The row index passed to MatGetRow()
534 is the global row number.
535
536 Fortran Notes:
537 The calling sequence from Fortran is
538.vb
539 MatGetRow(matrix,row,ncols,cols,values,ierr)
540 Mat matrix (input)
541 integer row (input)
542 integer ncols (output)
543 integer cols(maxcols) (output)
544 double precision (or double complex) values(maxcols) (output)
545.ve
546 where maxcols >= maximum nonzeros in any row of the matrix.
547
548
549 Caution:
550 Do not try to change the contents of the output arrays (cols and vals).
551 In some cases, this may corrupt the matrix.
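
   Example usage (a minimal sketch; rstart and rend are this process's local row range, e.g. obtained from MatGetOwnershipRange()):
.vb
      PetscInt          row,ncols;
      const PetscInt    *cols;
      const PetscScalar *vals;
      for (row=rstart; row<rend; row++) {
        ierr = MatGetRow(mat,row,&ncols,&cols,&vals);CHKERRQ(ierr);
        /* examine cols[0..ncols-1] and vals[0..ncols-1]; do not modify them */
        ierr = MatRestoreRow(mat,row,&ncols,&cols,&vals);CHKERRQ(ierr);
      }
.ve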
552
553 Level: advanced
554
555.seealso: MatRestoreRow(), MatSetValues(), MatGetValues(), MatCreateSubMatrices(), MatGetDiagonal()
556@*/
557PetscErrorCode MatGetRow(Mat mat,PetscInt row,PetscInt *ncols,const PetscInt *cols[],const PetscScalar *vals[])
558{
559 PetscErrorCode ierr;
560 PetscInt incols;
561
562 PetscFunctionBegin;
563 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
564 PetscValidType(mat,1);
565 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
566 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
567 if (!mat->ops->getrow) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
568 MatCheckPreallocated(mat,1);
569 ierr = PetscLogEventBegin(MAT_GetRow,mat,0,0,0);CHKERRQ(ierr);
570 ierr = (*mat->ops->getrow)(mat,row,&incols,(PetscInt**)cols,(PetscScalar**)vals);CHKERRQ(ierr);
571 if (ncols) *ncols = incols;
572 ierr = PetscLogEventEnd(MAT_GetRow,mat,0,0,0);CHKERRQ(ierr);
573 PetscFunctionReturn(0);
574}
575
576/*@
577 MatConjugate - Replaces the matrix values with their complex conjugates.
578
579 Logically Collective on Mat
580
581 Input Parameter:
582. mat - the matrix
583
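   Notes:
   A minimal usage sketch (meaningful only with complex scalars), forming the conjugate transpose AH of A by combining MatTranspose() with this routine:
.vb
      MatTranspose(A,MAT_INITIAL_MATRIX,&AH);
      MatConjugate(AH);
.ve
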
584 Level: advanced
585
586.seealso: VecConjugate()
587@*/
588PetscErrorCode MatConjugate(Mat mat)
589{
590#if defined(PETSC_USE_COMPLEX)
591 PetscErrorCode ierr;
592
593 PetscFunctionBegin;
594 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
595 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
596 if (!mat->ops->conjugate) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Not provided for this matrix format, send email to [email protected]");
597 ierr = (*mat->ops->conjugate)(mat);CHKERRQ(ierr);
598#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
599 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
600 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
601 }
602#endif
603 PetscFunctionReturn(0);
604#else
605 return 0;
606#endif
607}
608
609/*@C
610 MatRestoreRow - Frees any temporary space allocated by MatGetRow().
611
612 Not Collective
613
614 Input Parameters:
615+ mat - the matrix
616. row - the row to get
617. ncols, cols - the number of nonzeros and their columns
618- vals - if not NULL, the values
619
620 Notes:
621 This routine should be called after you have finished examining the entries.
622
623 This routine zeros out ncols, cols, and vals. This is to prevent accidental
624 use of the arrays after they have been restored. If you pass NULL, it will
625 not zero the pointers. Use of cols or vals after MatRestoreRow() is invalid.
626
627 Fortran Notes:
628 The calling sequence from Fortran is
629.vb
630 MatRestoreRow(matrix,row,ncols,cols,values,ierr)
631 Mat matrix (input)
632 integer row (input)
633 integer ncols (output)
634 integer cols(maxcols) (output)
635 double precision (or double complex) values(maxcols) (output)
636.ve
637 where maxcols >= maximum nonzeros in any row of the matrix.
638
639 In Fortran MatRestoreRow() MUST be called after MatGetRow()
640 before another call to MatGetRow() can be made.
641
642 Level: advanced
643
644.seealso: MatGetRow()
645@*/
646PetscErrorCode MatRestoreRow(Mat mat,PetscInt row,PetscInt *ncols,const PetscInt *cols[],const PetscScalar *vals[])
647{
648 PetscErrorCode ierr;
649
650 PetscFunctionBegin;
651 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
652 if (ncols) PetscValidIntPointer(ncols,3);
653 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
654 if (!mat->ops->restorerow) PetscFunctionReturn(0);
655 ierr = (*mat->ops->restorerow)(mat,row,ncols,(PetscInt **)cols,(PetscScalar **)vals);CHKERRQ(ierr);
656 if (ncols) *ncols = 0;
657 if (cols) *cols = NULL;
658 if (vals) *vals = NULL;
659 PetscFunctionReturn(0);
660}
661
662/*@
663 MatGetRowUpperTriangular - Sets a flag to enable calls to MatGetRow() for matrices in MATSBAIJ format.
664 You should call MatRestoreRowUpperTriangular() after calling MatGetRow()/MatRestoreRow() to disable the flag.
665
666 Not Collective
667
668 Input Parameter:
669. mat - the matrix
670
671 Notes:
672 The flag ensures that users are aware that MatGetRow() provides only the upper triangular part of the row for matrices in MATSBAIJ format.
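
   A typical sequence (a sketch; row is a locally owned row index):
.vb
      MatGetRowUpperTriangular(mat);
      MatGetRow(mat,row,&ncols,&cols,&vals);
      /* use the upper triangular part of the row */
      MatRestoreRow(mat,row,&ncols,&cols,&vals);
      MatRestoreRowUpperTriangular(mat);
.ve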
673
674 Level: advanced
675
676.seealso: MatRestoreRowUpperTriangular()
677@*/
678PetscErrorCode MatGetRowUpperTriangular(Mat mat)
679{
680 PetscErrorCode ierr;
681
682 PetscFunctionBegin;
683 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
684 PetscValidType(mat,1);
685 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
686 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
687 MatCheckPreallocated(mat,1);
688 if (!mat->ops->getrowuppertriangular) PetscFunctionReturn(0);
689 ierr = (*mat->ops->getrowuppertriangular)(mat);CHKERRQ(ierr);
690 PetscFunctionReturn(0);
691}
692
693/*@
694 MatRestoreRowUpperTriangular - Disables calls to MatGetRow() for matrices in MATSBAIJ format.
695
696 Not Collective
697
698 Input Parameter:
699. mat - the matrix
700
701 Notes:
702 This routine should be called after you have finished calling MatGetRow()/MatRestoreRow().
703
704
705 Level: advanced
706
707.seealso: MatGetRowUpperTriangular()
708@*/
709PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
710{
711 PetscErrorCode ierr;
712
713 PetscFunctionBegin;
714 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
715 PetscValidType(mat,1);
716 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
717 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
718 MatCheckPreallocated(mat,1);
719 if (!mat->ops->restorerowuppertriangular) PetscFunctionReturn(0);
720 ierr = (*mat->ops->restorerowuppertriangular)(mat);CHKERRQ(ierr);
721 PetscFunctionReturn(0);
722}
723
724/*@C
725 MatSetOptionsPrefix - Sets the prefix used for searching for all
726 Mat options in the database.
727
728 Logically Collective on Mat
729
730 Input Parameters:
731+ A - the Mat context
732- prefix - the prefix to prepend to all option names
733
734 Notes:
735 A hyphen (-) must NOT be given at the beginning of the prefix name.
736 The first character of all runtime options is AUTOMATICALLY the hyphen.
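
   For example (a sketch; the prefix name "sys1_" is arbitrary):
.vb
      MatSetOptionsPrefix(A,"sys1_");
      MatSetFromOptions(A);
.ve
   after which the matrix responds to runtime options such as -sys1_mat_view.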
737
738 Level: advanced
739
740.seealso: MatSetFromOptions()
741@*/
742PetscErrorCode MatSetOptionsPrefix(Mat A,const char prefix[])
743{
744 PetscErrorCode ierr;
745
746 PetscFunctionBegin;
747 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
748 ierr = PetscObjectSetOptionsPrefix((PetscObject)A,prefix);CHKERRQ(ierr);
749 PetscFunctionReturn(0);
750}
751
752/*@C
753 MatAppendOptionsPrefix - Appends to the prefix used for searching for all
754 Mat options in the database.
755
756 Logically Collective on Mat
757
758 Input Parameters:
759+ A - the Mat context
760- prefix - the prefix to append to all option names
761
762 Notes:
763 A hyphen (-) must NOT be given at the beginning of the prefix name.
764 The first character of all runtime options is AUTOMATICALLY the hyphen.
765
766 Level: advanced
767
768.seealso: MatGetOptionsPrefix()
769@*/
770PetscErrorCode MatAppendOptionsPrefix(Mat A,const char prefix[])
771{
772 PetscErrorCode ierr;
773
774 PetscFunctionBegin;
775 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
776 ierr = PetscObjectAppendOptionsPrefix((PetscObject)A,prefix);CHKERRQ(ierr);
777 PetscFunctionReturn(0);
778}
779
780/*@C
781 MatGetOptionsPrefix - Gets the prefix used for searching for all
782 Mat options in the database.
783
784 Not Collective
785
786 Input Parameter:
787. A - the Mat context
788
789 Output Parameter:
790. prefix - pointer to the prefix string used
791
792 Notes:
793 On the Fortran side, the user should pass in a string 'prefix' of
794 sufficient length to hold the prefix.
795
796 Level: advanced
797
798.seealso: MatAppendOptionsPrefix()
799@*/
800PetscErrorCode MatGetOptionsPrefix(Mat A,const char *prefix[])
801{
802 PetscErrorCode ierr;
803
804 PetscFunctionBegin;
805 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
806 ierr = PetscObjectGetOptionsPrefix((PetscObject)A,prefix);CHKERRQ(ierr);
807 PetscFunctionReturn(0);
808}
809
810/*@
811 MatResetPreallocation - Resets the matrix to use the original nonzero pattern provided by the user.
812
813 Collective on Mat
814
815 Input Parameter:
816. A - the Mat context
817
818 Notes:
819 The allocated memory is shrunk after MatAssemblyBegin()/MatAssemblyEnd() with MAT_FINAL_ASSEMBLY; calling this routine makes the original preallocated memory available again.
820 Currently only MATSEQAIJ and MATMPIAIJ are supported.
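
   For example (a sketch; A is an AIJ matrix that was preallocated and then assembled):
.vb
      MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
      MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
      MatResetPreallocation(A);   /* original preallocated storage is available again */
.ve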
821
822 Level: beginner
823
824.seealso: MatSeqAIJSetPreallocation(), MatMPIAIJSetPreallocation(), MatXAIJSetPreallocation()
825@*/
826PetscErrorCode MatResetPreallocation(Mat A)
827{
828 PetscErrorCode ierr;
829
830 PetscFunctionBegin;
831 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
832 PetscValidType(A,1);
833 ierr = PetscUseMethod(A,"MatResetPreallocation_C",(Mat),(A));CHKERRQ(ierr);
834 PetscFunctionReturn(0);
835}
836
837
838/*@
839 MatSetUp - Sets up the internal matrix data structures for later use.
840
841 Collective on Mat
842
843 Input Parameter:
844. A - the Mat context
845
846 Notes:
847 If the user has not set preallocation for this matrix then a default preallocation that is likely to be inefficient is used.
848
849 If a suitable preallocation routine is used, this function does not need to be called.
850
851 See the Performance chapter of the PETSc users manual for how to preallocate matrices.
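
   A typical creation sequence (a sketch; n is the global matrix size):
.vb
      MatCreate(PETSC_COMM_WORLD,&A);
      MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,n,n);
      MatSetFromOptions(A);
      MatSetUp(A);
.ve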
852
853 Level: beginner
854
855.seealso: MatCreate(), MatDestroy()
856@*/
857PetscErrorCode MatSetUp(Mat A)
858{
859 PetscMPIInt size;
860 PetscErrorCode ierr;
861
862 PetscFunctionBegin;
863 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
864 if (!((PetscObject)A)->type_name) {
865 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A), &size);CHKERRQ(ierr);
866 if (size == 1) {
867 ierr = MatSetType(A, MATSEQAIJ);CHKERRQ(ierr);
868 } else {
869 ierr = MatSetType(A, MATMPIAIJ);CHKERRQ(ierr);
870 }
871 }
872 if (!A->preallocated && A->ops->setup) {
873 ierr = PetscInfo(A,"Warning not preallocating matrix storage\n");CHKERRQ(ierr);
874 ierr = (*A->ops->setup)(A);CHKERRQ(ierr);
875 }
876 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
877 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
878 A->preallocated = PETSC_TRUE;
879 PetscFunctionReturn(0);
880}
881
882#if defined(PETSC_HAVE_SAWS)
883#include <petscviewersaws.h>
884#endif
885/*@C
886 MatView - Visualizes a matrix object.
887
888 Collective on Mat
889
890 Input Parameters:
891+ mat - the matrix
892- viewer - visualization context
893
894 Notes:
895 The available visualization contexts include
896+ PETSC_VIEWER_STDOUT_SELF - for sequential matrices
897. PETSC_VIEWER_STDOUT_WORLD - for parallel matrices created on PETSC_COMM_WORLD
898. PETSC_VIEWER_STDOUT_(comm) - for matrices created on MPI communicator comm
899- PETSC_VIEWER_DRAW_WORLD - graphical display of nonzero structure
900
901 The user can open alternative visualization contexts with
902+ PetscViewerASCIIOpen() - Outputs matrix to a specified file
903. PetscViewerBinaryOpen() - Outputs matrix in binary to a
904 specified file; corresponding input uses MatLoad()
905. PetscViewerDrawOpen() - Outputs nonzero matrix structure to
906 an X window display
907- PetscViewerSocketOpen() - Outputs matrix to Socket viewer.
908 Currently only the sequential dense and AIJ
909 matrix types support the Socket viewer.
910
911 The user can call PetscViewerPushFormat() to specify the output
912 format of ASCII printed objects (when using PETSC_VIEWER_STDOUT_SELF,
913 PETSC_VIEWER_STDOUT_WORLD and PetscViewerASCIIOpen). Available formats include
914+ PETSC_VIEWER_DEFAULT - default, prints matrix contents
915. PETSC_VIEWER_ASCII_MATLAB - prints matrix contents in Matlab format
916. PETSC_VIEWER_ASCII_DENSE - prints entire matrix including zeros
917. PETSC_VIEWER_ASCII_COMMON - prints matrix contents, using a sparse
918 format common among all matrix types
919. PETSC_VIEWER_ASCII_IMPL - prints matrix contents, using an implementation-specific
920 format (which is in many cases the same as the default)
921. PETSC_VIEWER_ASCII_INFO - prints basic information about the matrix
922 size and structure (not the matrix entries)
923- PETSC_VIEWER_ASCII_INFO_DETAIL - prints more detailed information about
924 the matrix structure
925
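   For example, to print basic information about a parallel matrix (a sketch using the world viewer):
.vb
      PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO);
      MatView(mat,PETSC_VIEWER_STDOUT_WORLD);
      PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);
.ve
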
926 Options Database Keys:
927+ -mat_view ::ascii_info - Prints info on matrix at conclusion of MatAssemblyEnd()
928. -mat_view ::ascii_info_detail - Prints more detailed info
929. -mat_view - Prints matrix in ASCII format
930. -mat_view ::ascii_matlab - Prints matrix in Matlab format
931. -mat_view draw - PetscDraws nonzero structure of matrix, using MatView() and PetscDrawOpenX().
932. -display <name> - Sets display name (default is host)
933. -draw_pause <sec> - Sets number of seconds to pause after display
934. -mat_view socket - Sends matrix to socket; it can be accessed from Matlab (see Users-Manual: ch_matlab for details)
935. -viewer_socket_machine <machine> -
936. -viewer_socket_port <port> -
937. -mat_view binary - save matrix to file in binary format
938- -viewer_binary_filename <name> -
939 Level: beginner
940
941 Notes:
942 The ASCII viewers are only recommended for small matrices on at most a moderate number of processes;
943 for larger matrices the program will seemingly hang and take hours, so use the binary format instead.
944
945 See the manual page for MatLoad() for the exact format of the binary file when the binary
946 viewer is used.
947
948 See share/petsc/matlab/PetscBinaryRead.m for a Matlab code that can read in the binary file when the binary
949 viewer is used.
950
951 One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
952 and then use the following mouse functions.
953+ left mouse: zoom in
954. middle mouse: zoom out
955- right mouse: continue with the simulation
956
957.seealso: PetscViewerPushFormat(), PetscViewerASCIIOpen(), PetscViewerDrawOpen(),
958 PetscViewerSocketOpen(), PetscViewerBinaryOpen(), MatLoad()
959@*/
960PetscErrorCode MatView(Mat mat,PetscViewer viewer)
961{
962 PetscErrorCode ierr;
963 PetscInt rows,cols,rbs,cbs;
964 PetscBool iascii,ibinary,isstring;
965 PetscViewerFormat format;
966 PetscMPIInt size;
967#if defined(PETSC_HAVE_SAWS)
968 PetscBool issaws;
969#endif
970
971 PetscFunctionBegin;
972 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
973 PetscValidType(mat,1);
974 if (!viewer) {
975 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat),&viewer);CHKERRQ(ierr);
976 }
977 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
978 PetscCheckSameComm(mat,1,viewer,2);
979 MatCheckPreallocated(mat,1);
980 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
981 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
982 if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(0);
983 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&ibinary);CHKERRQ(ierr);
984 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
985 if (ibinary) {
986 PetscBool mpiio;
987 ierr = PetscViewerBinaryGetUseMPIIO(viewer,&mpiio);CHKERRQ(ierr);
988 if (mpiio) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"PETSc matrix viewers do not support using MPI-IO, turn off that flag");
989 }
990
991 ierr = PetscLogEventBegin(MAT_View,mat,viewer,0,0);CHKERRQ(ierr);
992 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
993 if ((!iascii || (format != PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) && mat->factortype) {
994 SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"No viewers for factored matrix except ASCII info or info_detailed");
995 }
996
997#if defined(PETSC_HAVE_SAWS)
998 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws);CHKERRQ(ierr);
999#endif
1000 if (iascii) {
1001 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ORDER,"Must call MatAssemblyBegin/End() before viewing matrix");
1002 ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mat,viewer);CHKERRQ(ierr);
1003 if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1004 MatNullSpace nullsp,transnullsp;
1005
1006 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1007 ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
1008 ierr = MatGetBlockSizes(mat,&rbs,&cbs);CHKERRQ(ierr);
1009 if (rbs != 1 || cbs != 1) {
1010 if (rbs != cbs) {ierr = PetscViewerASCIIPrintf(viewer,"rows=%D, cols=%D, rbs=%D, cbs = %D\n",rows,cols,rbs,cbs);CHKERRQ(ierr);}
1011 else {ierr = PetscViewerASCIIPrintf(viewer,"rows=%D, cols=%D, bs=%D\n",rows,cols,rbs);CHKERRQ(ierr);}
1012 } else {
1013 ierr = PetscViewerASCIIPrintf(viewer,"rows=%D, cols=%D\n",rows,cols);CHKERRQ(ierr);
1014 }
1015 if (mat->factortype) {
1016 MatSolverType solver;
1017 ierr = MatFactorGetSolverType(mat,&solver);CHKERRQ(ierr);
1018 ierr = PetscViewerASCIIPrintf(viewer,"package used to perform factorization: %s\n",solver);CHKERRQ(ierr);
1019 }
1020 if (mat->ops->getinfo) {
1021 MatInfo info;
1022 ierr = MatGetInfo(mat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr);
1023 ierr = PetscViewerASCIIPrintf(viewer,"total: nonzeros=%.f, allocated nonzeros=%.f\n",info.nz_used,info.nz_allocated);CHKERRQ(ierr);
1024 ierr = PetscViewerASCIIPrintf(viewer,"total number of mallocs used during MatSetValues calls =%D\n",(PetscInt)info.mallocs);CHKERRQ(ierr);
1025 }
1026 ierr = MatGetNullSpace(mat,&nullsp);CHKERRQ(ierr);
1027 ierr = MatGetTransposeNullSpace(mat,&transnullsp);CHKERRQ(ierr);
1028 if (nullsp) {ierr = PetscViewerASCIIPrintf(viewer," has attached null space\n");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1028,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1029 if (transnullsp && transnullsp != nullsp) {ierr = PetscViewerASCIIPrintf(viewer," has attached transposed null space\n");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1029,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1030 ierr = MatGetNearNullSpace(mat,&nullsp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1030,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1031 if (nullsp) {ierr = PetscViewerASCIIPrintf(viewer," has attached near null space\n");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1031,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1032 }
1033#if defined(PETSC_HAVE_SAWS)
1034 } else if (issaws) {
1035 PetscMPIInt rank;
1036
1037 ierr = PetscObjectName((PetscObject)mat);CHKERRQ(ierr);
1038 ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
1039 if (!((PetscObject)mat)->amsmem && !rank) {
1040 ierr = PetscObjectViewSAWs((PetscObject)mat,viewer);CHKERRQ(ierr);
1041 }
1042#endif
1043 } else if (isstring) {
1044 const char *type;
1045 ierr = MatGetType(mat,&type);CHKERRQ(ierr);
1046 ierr = PetscViewerStringSPrintf(viewer," MatType: %-7.7s",type);CHKERRQ(ierr);
1047 if (mat->ops->view) {ierr = (*mat->ops->view)(mat,viewer);CHKERRQ(ierr);}
1048 }
1049 if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1050 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1051 ierr = (*mat->ops->viewnative)(mat,viewer);CHKERRQ(ierr);
1052 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1053 } else if (mat->ops->view) {
1054 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1055 ierr = (*mat->ops->view)(mat,viewer);CHKERRQ(ierr);
1056 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1057 }
1058 if (iascii) {
1059 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1060 if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1061 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1062 }
1063 }
1064 ierr = PetscLogEventEnd(MAT_View,mat,viewer,0,0);CHKERRQ(ierr);
1065 PetscFunctionReturn(0);
1066}
1067
1068#if defined(PETSC_USE_DEBUG)
1069#include <../src/sys/totalview/tv_data_display.h>
1070PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1071{
1072 TV_add_row("Local rows", "int", &mat->rmap->n);
1073 TV_add_row("Local columns", "int", &mat->cmap->n);
1074 TV_add_row("Global rows", "int", &mat->rmap->N);
1075 TV_add_row("Global columns", "int", &mat->cmap->N);
1076 TV_add_row("Typename", TV_ascii_string_type"$string", ((PetscObject)mat)->type_name);
1077 return TV_format_OK;
1078}
1079#endif
1080
1081/*@C
1082 MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1083 with MatView(). The matrix format is determined from the options database.
1084 Generates a parallel MPI matrix if the communicator has more than one
1085 processor. The default matrix type is AIJ.
1086
1087 Collective on PetscViewer
1088
1089 Input Parameters:
1090+ newmat - the newly loaded matrix; it must have been created with MatCreate()
1091 or some related function before calling MatLoad()
1092- viewer - binary/HDF5 file viewer
1093
1094 Options Database Keys:
1095. -matload_block_size <bs> - used with block matrix formats
1096 (MATSEQBAIJ, ...) to specify the
1097 block size
1098
1099 Level: beginner
1100
1101 Notes:
1102 If the Mat type has not yet been given then MATAIJ is used; call MatSetFromOptions() on the
1103 Mat before calling this routine if you wish to set the type from the options database.
1104
1105 MatLoad() automatically loads into the options database any options
1106 given in the file filename.info, where filename is the name of the file
1107 that was passed to PetscViewerBinaryOpen(). The options in the info
1108 file will be ignored if you use the -viewer_binary_skip_info option.
1109
1110 If the type or size of newmat is not set before a call to MatLoad(), PETSc
1111 sets the default matrix type AIJ and sets the local and global sizes.
1112 If the type and/or size is already set, then it is used.
1113
1114 In parallel, each processor can load a subset of rows (or the
1115 entire matrix). This routine is especially useful when a large
1116 matrix is stored on disk and only part of it is desired on each
1117 processor. For example, a parallel solver may access only some of
1118 the rows from each processor. The algorithm used here reads
1119 relatively small blocks of data rather than reading the entire
1120 matrix and then subsetting it.
1121
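 For example, to prescribe the local row distribution before loading (a minimal sketch; nlocal is a
 hypothetical per-rank row count):
$ MatSetSizes(newmat,nlocal,PETSC_DECIDE,PETSC_DETERMINE,PETSC_DETERMINE);
$ MatLoad(newmat,viewer);
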
1122 The viewer's PetscViewerType must be either PETSCVIEWERBINARY or PETSCVIEWERHDF5.
1123 Such a viewer can be created using PetscViewerBinaryOpen()/PetscViewerHDF5Open(),
1124 or with a sequence like
1125$ PetscViewer v;
1126$ PetscViewerCreate(PETSC_COMM_WORLD,&v);
1127$ PetscViewerSetType(v,PETSCVIEWERBINARY);
1128$ PetscViewerSetFromOptions(v);
1129$ PetscViewerFileSetMode(v,FILE_MODE_READ);
1130$ PetscViewerFileSetName(v,"datafile");
1131 The optional PetscViewerSetFromOptions() call allows overriding PetscViewerSetType() using the option
1132$ -viewer_type {binary,hdf5}
1133
1134 See the example src/ksp/ksp/examples/tutorials/ex27.c for the first approach,
1135 and src/mat/examples/tutorials/ex10.c for the second approach.
1136
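 For illustration, a minimal end-to-end load sketch using the binary viewer (the file name
 "matrix.dat" is hypothetical; error checking omitted):
$ Mat A;
$ PetscViewer v;
$ PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_READ,&v);
$ MatCreate(PETSC_COMM_WORLD,&A);
$ MatLoad(A,v);
$ PetscViewerDestroy(&v);
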
1137 Notes about the PETSc binary format:
1138 In the case of PETSCVIEWERBINARY, the native PETSc binary format is used. Each of the blocks
1139 is read onto rank 0 and then shipped to its destination rank, one after another.
1140 Multiple objects, both matrices and vectors, can be stored within the same file.
1141 Their PetscObject names are ignored; they are loaded in the order of their storage.
1142
1143 Most users should not need to know the details of the binary storage
1144 format, since MatLoad() and MatView() completely hide these details.
1145 But for anyone who's interested, the standard binary matrix storage
1146 format is
1147
1148$ int MAT_FILE_CLASSID
1149$ int number of rows
1150$ int number of columns
1151$ int total number of nonzeros
1152$ int *number nonzeros in each row
1153$ int *column indices of all nonzeros (starting index is zero)
1154$ PetscScalar *values of all nonzeros
1155
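 For example, the header alone can be inspected with the low-level helpers (a hedged sketch; the
 exact PetscBinaryRead() calling sequence varies between PETSc versions, and "matrix.dat" is hypothetical):
$ int fd;
$ PetscInt header[4];
$ PetscBinaryOpen("matrix.dat",FILE_MODE_READ,&fd);
$ PetscBinaryRead(fd,header,4,PETSC_INT); /* MAT_FILE_CLASSID, rows, cols, nnz */
$ PetscBinaryClose(fd);
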
1156 PETSc automatically does the byte swapping for
1157 machines that store the bytes reversed, e.g. DEC Alpha, FreeBSD,
1158 Linux, Windows, and the Paragon; thus if you write your own binary
1159 read/write routines you have to swap the bytes; see PetscBinaryRead()
1160 and PetscBinaryWrite() to see how this may be done.
1161
1162 Notes about the HDF5 (MATLAB MAT-File Version 7.3) format:
1163 In the case of PETSCVIEWERHDF5, a parallel HDF5 reader is used.
1164 Each processor's chunk is loaded independently by its owning rank.
1165 Multiple objects, both matrices and vectors, can be stored within the same file.
1166 They are looked up by their PetscObject name.
1167
1168 As the MATLAB MAT-File Version 7.3 format is also an HDF5 flavor, we decided to use
1169 by default the same structure and naming of the AIJ arrays and column count
1170 within the HDF5 file. This means that a MAT file saved with the -v7.3 flag, e.g.
1171$ save example.mat A b -v7.3
1172 can be directly read by this routine (see Reference 1 for details).
1173 Note that depending on your MATLAB version, this format might be the default;
1174 otherwise you can set it as the default in Preferences.
1175
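 For illustration, a sketch of loading the MATLAB matrix A saved above (assumes PETSc was
 configured with HDF5; recall that HDF5 objects are looked up by their PetscObject name):
$ PetscViewer v;
$ PetscViewerHDF5Open(PETSC_COMM_WORLD,"example.mat",FILE_MODE_READ,&v);
$ PetscObjectSetName((PetscObject)A,"A");
$ MatLoad(A,v);
$ PetscViewerDestroy(&v);
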
1176 Unless the -nocompression flag is used when saving the file in MATLAB,
1177 PETSc must be configured with the ZLIB package.
1178
1179 See also examples src/mat/examples/tutorials/ex10.c and src/ksp/ksp/examples/tutorials/ex27.c
1180
1181 Current HDF5 (MAT-File) limitations:
1182 This reader currently supports only real MATSEQAIJ, MATMPIAIJ, MATSEQDENSE and MATMPIDENSE matrices.
1183
1184 The corresponding MatView() is not yet implemented.
1185
1186 The loaded matrix is actually the transpose of the original MATLAB matrix,
1187 unless you push the PETSC_VIEWER_HDF5_MAT format (see the examples above).
1188 With this format, the matrix is automatically transposed by PETSc,
1189 unless the matrix is marked as SPD or symmetric
1190 (see MatSetOption(), MAT_SPD, MAT_SYMMETRIC).
1191
1192 References:
11931. MATLAB(R) Documentation, manual page of save(), https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version
1194
1195.seealso: PetscViewerBinaryOpen(), PetscViewerSetType(), MatView(), VecLoad()
1196
1197 @*/
1198PetscErrorCode MatLoad(Mat newmat,PetscViewer viewer)
1199{
1200 PetscErrorCode ierr;
1201 PetscBool flg;
1202
1203 PetscFunctionBegin;
1204 PetscValidHeaderSpecific(newmat,MAT_CLASSID,1);
1205 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
1206
1207 if (!((PetscObject)newmat)->type_name) {
1208 ierr = MatSetType(newmat,MATAIJ);CHKERRQ(ierr);
1209 }
1210
1211 flg = PETSC_FALSE;
1212 ierr = PetscOptionsGetBool(((PetscObject)newmat)->options,((PetscObject)newmat)->prefix,"-matload_symmetric",&flg,NULL);CHKERRQ(ierr);
1213 if (flg) {
1214 ierr = MatSetOption(newmat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
1215 ierr = MatSetOption(newmat,MAT_SYMMETRY_ETERNAL,PETSC_TRUE);CHKERRQ(ierr);
1216 }
1217 flg = PETSC_FALSE;
1218 ierr = PetscOptionsGetBool(((PetscObject)newmat)->options,((PetscObject)newmat)->prefix,"-matload_spd",&flg,NULL);CHKERRQ(ierr);
1219 if (flg) {
1220 ierr = MatSetOption(newmat,MAT_SPD,PETSC_TRUE);CHKERRQ(ierr);
1221 }
1222
1223 if (!newmat->ops->load) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatLoad is not supported for type");
1224 ierr = PetscLogEventBegin(MAT_Load,viewer,0,0,0);CHKERRQ(ierr);
1225 ierr = (*newmat->ops->load)(newmat,viewer);CHKERRQ(ierr);
1226 ierr = PetscLogEventEnd(MAT_Load,viewer,0,0,0);CHKERRQ(ierr);
1227 PetscFunctionReturn(0);
1228}
1229
1230PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1231{
1232 PetscErrorCode ierr;
1233 Mat_Redundant *redund = *redundant;
1234 PetscInt i;
1235
1236 PetscFunctionBegin;
1237 if (redund){
1238 if (redund->matseq) { /* via MatCreateSubMatrices() */
1239 ierr = ISDestroy(&redund->isrow);CHKERRQ(ierr);
1240 ierr = ISDestroy(&redund->iscol);CHKERRQ(ierr);
1241 ierr = MatDestroySubMatrices(1,&redund->matseq);CHKERRQ(ierr);
1242 } else {
1243 ierr = PetscFree2(redund->send_rank,redund->recv_rank);CHKERRQ(ierr);
1244 ierr = PetscFree(redund->sbuf_j);CHKERRQ(ierr);
1245 ierr = PetscFree(redund->sbuf_a);CHKERRQ(ierr);
1246 for (i=0; i<redund->nrecvs; i++) {
1247 ierr = PetscFree(redund->rbuf_j[i]);CHKERRQ(ierr);
1248 ierr = PetscFree(redund->rbuf_a[i]);CHKERRQ(ierr);
1249 }
1250 ierr = PetscFree4(redund->sbuf_nz,redund->rbuf_nz,redund->rbuf_j,redund->rbuf_a);CHKERRQ(ierr);
1251 }
1252
1253 if (redund->subcomm) {
1254 ierr = PetscCommDestroy(&redund->subcomm);CHKERRQ(ierr);
1255 }
1256 ierr = PetscFree(redund);CHKERRQ(ierr);
1257 }
1258 PetscFunctionReturn(0);
1259}
1260
1261/*@
1262 MatDestroy - Frees space taken by a matrix.
1263
1264 Collective on Mat
1265
1266 Input Parameter:
1267. A - the matrix
1268
1269 Level: beginner
1270
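 Example of Usage (a minimal sketch):
.vb
 Mat A;
 MatCreate(PETSC_COMM_WORLD,&A);
 /* ... set sizes and type, fill and use the matrix ... */
 MatDestroy(&A);  /* A is set to NULL; calling MatDestroy() on a NULL pointer is a no-op */
.ve
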
1271@*/
1272PetscErrorCode MatDestroy(Mat *A)
1273{
1274 PetscErrorCode ierr;
1275
1276 PetscFunctionBegin;
1277 if (!*A) PetscFunctionReturn(0);
1278 PetscValidHeaderSpecific(*A,MAT_CLASSID,1);
1279 if (--((PetscObject)(*A))->refct > 0) {*A = NULL; PetscFunctionReturn(0);}
1280
1281 /* if memory was published with SAWs then destroy it */
1282 ierr = PetscObjectSAWsViewOff((PetscObject)*A);CHKERRQ(ierr);
1283 if ((*A)->ops->destroy) {
1284 ierr = (*(*A)->ops->destroy)(*A);CHKERRQ(ierr);
1285 }
1286
1287 ierr = PetscFree((*A)->defaultvectype);CHKERRQ(ierr);
1288 ierr = PetscFree((*A)->bsizes);CHKERRQ(ierr);
1289 ierr = PetscFree((*A)->solvertype);CHKERRQ(ierr);
1290 ierr = MatDestroy_Redundant(&(*A)->redundant);CHKERRQ(ierr);
1291 ierr = MatNullSpaceDestroy(&(*A)->nullsp);CHKERRQ(ierr);
1292 ierr = MatNullSpaceDestroy(&(*A)->transnullsp);CHKERRQ(ierr);
1293 ierr = MatNullSpaceDestroy(&(*A)->nearnullsp);CHKERRQ(ierr);
1294 ierr = MatDestroy(&(*A)->schur);CHKERRQ(ierr);
1295 ierr = PetscLayoutDestroy(&(*A)->rmap);CHKERRQ(ierr);
1296 ierr = PetscLayoutDestroy(&(*A)->cmap);CHKERRQ(ierr);
1297 ierr = PetscHeaderDestroy(A);CHKERRQ(ierr);
1298 PetscFunctionReturn(0);
1299}
1300
1301/*@C
1302 MatSetValues - Inserts or adds a block of values into a matrix.
1303 These values may be cached, so MatAssemblyBegin() and MatAssemblyEnd()
1304 MUST be called after all calls to MatSetValues() have been completed.
1305
1306 Not Collective
1307
1308 Input Parameters:
1309+ mat - the matrix
1310. v - a logically two-dimensional array of values
1311. m, idxm - the number of rows and their global indices
1312. n, idxn - the number of columns and their global indices
1313- addv - either ADD_VALUES or INSERT_VALUES, where
1314 ADD_VALUES adds values to any existing entries, and
1315 INSERT_VALUES replaces existing entries with new values
1316
1317 Notes:
1318 If you create the matrix yourself (that is, not with a call to DMCreateMatrix()) then you MUST call MatXXXXSetPreallocation() or
1319 MatSetUp() before using this routine
1320
1321 By default the values, v, are row-oriented. See MatSetOption() for other options.
1322
1323 Calls to MatSetValues() with the INSERT_VALUES and ADD_VALUES
1324 options cannot be mixed without intervening calls to the assembly
1325 routines.
1326
1327 MatSetValues() uses 0-based row and column numbers in Fortran
1328 as well as in C.
1329
1330 Negative indices may be passed in idxm and idxn; these rows and columns are
1331 simply ignored. This makes it easy to insert element stiffness matrices
1332 with homogeneous Dirichlet boundary conditions that you do not want represented
1333 in the matrix.
1334
1335 Efficiency Alert:
1336 The routine MatSetValuesBlocked() may offer much better efficiency
1337 for users of block sparse formats (MATSEQBAIJ and MATMPIBAIJ).
1338
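 Example of Usage (a minimal sketch; assumes mat has been preallocated or MatSetUp() called,
 and the 2x2 indices and values are purely illustrative):
.vb
 PetscInt    rows[2] = {0,1},cols[2] = {0,2};
 PetscScalar v[4]    = {1.0,2.0,3.0,4.0}; /* row-oriented: v[i*n+j] lands at (rows[i],cols[j]) */
 MatSetValues(mat,2,rows,2,cols,v,INSERT_VALUES);
 MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
 MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
.ve
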
1339 Level: beginner
1340
1341 Developer Notes:
1342 This is labeled with C so does not automatically generate Fortran stubs and interfaces
1343 because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1344
1345.seealso: MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValuesBlocked(), MatSetValuesLocal(),
1346 InsertMode, INSERT_VALUES, ADD_VALUES
1347@*/
1348PetscErrorCode MatSetValues(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],const PetscScalar v[],InsertMode addv)
1349{
1350 PetscErrorCode ierr;
1351#if defined(PETSC_USE_DEBUG)
1352 PetscInt i,j;
1353#endif
1354
1355 PetscFunctionBeginHot;
1356 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1357 PetscValidType(mat,1);
1358 if (!m || !n) PetscFunctionReturn(0); /* no values to insert */
1359 PetscValidIntPointer(idxm,3);
1360 PetscValidIntPointer(idxn,5);
1361 MatCheckPreallocated(mat,1);
1362
1363 if (mat->insertmode == NOT_SET_VALUES) {
1364 mat->insertmode = addv;
1365 }
1366#if defined(PETSC_USE_DEBUG)
1367 else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
1368 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
1369 if (!mat->ops->setvalues) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
1370
1371 for (i=0; i<m; i++) {
1372 for (j=0; j<n; j++) {
1373 if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i*n+j]))
1374#if defined(PETSC_USE_COMPLEX)
1375 SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_FP,"Inserting %g+ig at matrix entry (%D,%D)",(double)PetscRealPart(v[i*n+j]),(double)PetscImaginaryPart(v[i*n+j]),idxm[i],idxn[j]);
1376#else
1377 SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_FP,"Inserting %g at matrix entry (%D,%D)",(double)v[i*n+j],idxm[i],idxn[j]);
1378#endif
1379 }
1380 }
1381#endif
1382
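 /* inserting new values invalidates any previous assembly */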
1383 if (mat->assembled) {
1384 mat->was_assembled = PETSC_TRUE;
1385 mat->assembled = PETSC_FALSE;
1386 }
1387 ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
1388 ierr = (*mat->ops->setvalues)(mat,m,idxm,n,idxn,v,addv);CHKERRQ(ierr);
1389 ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
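 /* the new values live on the CPU, so mark any GPU copy of the matrix as stale */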
1390#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
1391 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
1392 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
1393 }
1394#endif
1395 PetscFunctionReturn(0);
1396}
1397
1398
1399/*@
1400 MatSetValuesRowLocal - Inserts a row (block row for BAIJ matrices) of nonzero
1401 values into a matrix
1402
1403 Not Collective
1404
1405 Input Parameters:
1406+ mat - the matrix
1407. row - the (block) row to set
1408- v - a logically two-dimensional array of values
1409
1410 Notes:
1411 The values, v, are column-oriented (for the block version) and sorted.
1412
1413 All the nonzeros in the row must be provided
1414
1415 The matrix must have previously had its column indices set
1416
1417 The row must belong to this process
1418
1419 Level: intermediate
1420
1421.seealso: MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValuesBlocked(), MatSetValuesLocal(),
1422 InsertMode, INSERT_VALUES, ADD_VALUES, MatSetValues(), MatSetValuesRow(), MatSetLocalToGlobalMapping()
1423@*/
1424PetscErrorCode MatSetValuesRowLocal(Mat mat,PetscInt row,const PetscScalar v[])
1425{
1426 PetscErrorCode ierr;
1427 PetscInt globalrow;
1428
1429 PetscFunctionBegin;
1430 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1431 PetscValidType(mat,1);
1432 PetscValidScalarPointer(v,2);
1433 ierr = ISLocalToGlobalMappingApply(mat->rmap->mapping,1,&row,&globalrow);CHKERRQ(ierr);
1434 ierr = MatSetValuesRow(mat,globalrow,v);CHKERRQ(ierr);
1435#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
1436 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
1437 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
1438 }
1439#endif
1440 PetscFunctionReturn(0);
1441}
1442
1443/*@
1444 MatSetValuesRow - Inserts a row (block row for BAIJ matrices) of nonzero
1445 values into a matrix
1446
1447 Not Collective
1448
1449 Input Parameters:
1450+ mat - the matrix
1451. row - the (block) row to set
1452- v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one; otherwise a one-dimensional array of values
1453
1454 Notes:
1455 The values, v, are column-oriented for the block version.
1456
1457 All the nonzeros in the row must be provided
1458
1459 THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED; usually MatSetValues() is used.
1460
1461 The row must belong to this process
1462
1463 Level: advanced
1464
1465.seealso: MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValuesBlocked(), MatSetValuesLocal(),
1466 InsertMode, INSERT_VALUES, ADD_VALUES, MatSetValues()
1467@*/
1468PetscErrorCode MatSetValuesRow(Mat mat,PetscInt row,const PetscScalar v[])
1469{
1470 PetscErrorCode ierr;
1471
1472 PetscFunctionBeginHot;
1473 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1474 PetscValidType(mat,1);
1475 MatCheckPreallocated(mat,1);
1476 PetscValidScalarPointer(v,2);
1477#if defined(PETSC_USE_DEBUG)
1478 if (mat->insertmode == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add and insert values");
1479 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
1480#endif
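 /* this routine always overwrites the existing row, so force INSERT_VALUES mode */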
1481 mat->insertmode = INSERT_VALUES;
1482
1483 if (mat->assembled) {
1484 mat->was_assembled = PETSC_TRUE;
1485 mat->assembled = PETSC_FALSE;
1486 }
1487 ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
1488 if (!mat->ops->setvaluesrow) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
1489 ierr = (*mat->ops->setvaluesrow)(mat,row,v);CHKERRQ(ierr);
1490 ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
1491#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
1492 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
1493 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
1494 }
1495#endif
1496 PetscFunctionReturn(0);
1497}
1498
1499/*@
1500 MatSetValuesStencil - Inserts or adds a block of values into a matrix,
1501 using structured grid indexing
1502
1503 Not Collective
1504
1505 Input Parameters:
1506+ mat - the matrix
1507. m - number of rows being entered
1508. idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1509. n - number of columns being entered
1510. idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1511. v - a logically two-dimensional array of values
1512- addv - either ADD_VALUES or INSERT_VALUES, where
1513 ADD_VALUES adds values to any existing entries, and
1514 INSERT_VALUES replaces existing entries with new values
1515
1516 Notes:
1517 By default the values, v, are row-oriented. See MatSetOption() for other options.
1518
1519 Calls to MatSetValuesStencil() with the INSERT_VALUES and ADD_VALUES
1520 options cannot be mixed without intervening calls to the assembly
1521 routines.
1522
1523 The grid coordinates are across the entire grid, not just the local portion
1524
1525 MatSetValuesStencil() uses 0-based row and column numbers in Fortran
1526 as well as in C.
1527
1528 For setting/accessing vector values via array coordinates you can use the DMDAVecGetArray() routine
1529
1530 In order to use this routine you must either obtain the matrix with DMCreateMatrix()
1531 or call MatSetLocalToGlobalMapping() and MatSetStencil() first.
1532
1533 The columns and rows in the stencil passed in MUST be contained within the
1534 ghost region of the given process as set with DMDACreateXXX() or MatSetStencil(). For example,
1535 if you create a DMDA with an overlap of one grid level and on a particular process its first
1536 local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5), the
1537 first i index you can use in your column and row indices in MatSetStencil() is 5.
1538
1539 In Fortran idxm and idxn should be declared as
1540$ MatStencil idxm(4,m),idxn(4,n)
1541 and the values inserted using
1542$ idxm(MatStencil_i,1) = i
1543$ idxm(MatStencil_j,1) = j
1544$ idxm(MatStencil_k,1) = k
1545$ idxm(MatStencil_c,1) = c
1546 etc
1547
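 In C, a corresponding sketch for one row of a 2d 5-point stencil (the coefficients are purely
 illustrative, and i,j stand for the grid indices of the row being set):
.vb
 MatStencil  row,col[5];
 PetscScalar v[5] = {-1.0,-1.0,4.0,-1.0,-1.0};
 row.i = i; row.j = j;
 col[0].i = i;   col[0].j = j-1;
 col[1].i = i-1; col[1].j = j;
 col[2].i = i;   col[2].j = j;
 col[3].i = i+1; col[3].j = j;
 col[4].i = i;   col[4].j = j+1;
 MatSetValuesStencil(mat,1,&row,5,col,v,INSERT_VALUES);
.ve
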
1548 For periodic boundary conditions use negative indices for values to the left (below 0); these
1549 are obtained by wrapping values from the right edge. For values to the right of the last entry,
1550 use that index plus one, etc., to obtain values wrapped around from the left edge. This works
1551 only for the DM_BOUNDARY_PERIODIC boundary type.
1552
1553 For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
1554 a single value per point) you can skip filling those indices.
1555
1556 Inspired by the structured grid interface to the HYPRE package
1557 (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1558
1559 Efficiency Alert:
1560 The routine MatSetValuesBlockedStencil() may offer much better efficiency
1561 for users of block sparse formats (MATSEQBAIJ and MATMPIBAIJ).
1562
1563 Level: beginner
1564
1565.seealso: MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValuesBlocked(), MatSetValuesLocal()
1566 MatSetValues(), MatSetValuesBlockedStencil(), MatSetStencil(), DMCreateMatrix(), DMDAVecGetArray(), MatStencil
1567@*/
1568PetscErrorCode MatSetValuesStencil(Mat mat,PetscInt m,const MatStencil idxm[],PetscInt n,const MatStencil idxn[],const PetscScalar v[],InsertMode addv)
1569{
1570 PetscErrorCode ierr;
1571 PetscInt buf[8192],*bufm=0,*bufn=0,*jdxm,*jdxn;
1572 PetscInt j,i,dim = mat->stencil.dim,*dims = mat->stencil.dims+1,tmp;
1573 PetscInt *starts = mat->stencil.starts,*dxm = (PetscInt*)idxm,*dxn = (PetscInt*)idxn,sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1574
1575 PetscFunctionBegin;
1576 if (!m || !n) PetscFunctionReturn(0); /* no values to insert */
1577 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),1577,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),1577,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),1577,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),1577,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
1578 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),1578,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
1579 PetscValidIntPointer(idxm,3)do { if (!idxm) return PetscError(((MPI_Comm)0x44000001),1579
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(idxm,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),1579,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,3); } while (0)
;
1580 PetscValidIntPointer(idxn,5)do { if (!idxn) return PetscError(((MPI_Comm)0x44000001),1580
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",5); if
(!PetscCheckPointer(idxn,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),1580,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,5); } while (0)
;
1581
1582 if ((m+n) <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
1583 jdxm = buf; jdxn = buf+m;
1584 } else {
1585    ierr = PetscMalloc2(m,&bufm,n,&bufn);CHKERRQ(ierr);
1586 jdxm = bufm; jdxn = bufn;
1587 }
1588 for (i=0; i<m; i++) {
1589 for (j=0; j<3-sdim; j++) dxm++;
1590 tmp = *dxm++ - starts[0];
1591 for (j=0; j<dim-1; j++) {
1592 if ((*dxm++ - starts[j+1]) < 0 || tmp < 0) tmp = -1;
1593 else tmp = tmp*dims[j] + *(dxm-1) - starts[j+1];
1594 }
1595 if (mat->stencil.noc) dxm++;
1596 jdxm[i] = tmp;
1597 }
1598 for (i=0; i<n; i++) {
1599 for (j=0; j<3-sdim; j++) dxn++;
1600 tmp = *dxn++ - starts[0];
1601 for (j=0; j<dim-1; j++) {
1602 if ((*dxn++ - starts[j+1]) < 0 || tmp < 0) tmp = -1;
1603 else tmp = tmp*dims[j] + *(dxn-1) - starts[j+1];
1604 }
1605 if (mat->stencil.noc) dxn++;
1606 jdxn[i] = tmp;
1607 }
1608  ierr = MatSetValuesLocal(mat,m,jdxm,n,jdxn,v,addv);CHKERRQ(ierr);
1609  ierr = PetscFree2(bufm,bufn);CHKERRQ(ierr);
1610  PetscFunctionReturn(0);
1611}
1612
1613/*@
1614   MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix,
1615   using structured grid indexing.
1616
1617 Not Collective
1618
1619 Input Parameters:
1620+ mat - the matrix
1621. m - number of rows being entered
1622. idxm - grid coordinates for matrix rows being entered
1623. n - number of columns being entered
1624. idxn - grid coordinates for matrix columns being entered
1625. v - a logically two-dimensional array of values
1626- addv - either ADD_VALUES or INSERT_VALUES, where
1627 ADD_VALUES adds values to any existing entries, and
1628 INSERT_VALUES replaces existing entries with new values
1629
1630 Notes:
1631 By default the values, v, are row-oriented and unsorted.
1632 See MatSetOption() for other options.
1633
1634 Calls to MatSetValuesBlockedStencil() with the INSERT_VALUES and ADD_VALUES
1635 options cannot be mixed without intervening calls to the assembly
1636 routines.
1637
1638 The grid coordinates are across the entire grid, not just the local portion
1639
1640 MatSetValuesBlockedStencil() uses 0-based row and column numbers in Fortran
1641 as well as in C.
1642
1643 For setting/accessing vector values via array coordinates you can use the DMDAVecGetArray() routine
1644
1645 In order to use this routine you must either obtain the matrix with DMCreateMatrix()
1646 or call MatSetBlockSize(), MatSetLocalToGlobalMapping() and MatSetStencil() first.
1647
1648 The columns and rows in the stencil passed in MUST be contained within the
1649 ghost region of the given process as set with DMDACreateXXX() or MatSetStencil(). For example,
1650 if you create a DMDA with an overlap of one grid level and on a particular process its first
1651 local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1652 first i index you can use in your column and row indices in MatSetStencil() is 5.
1653
1654 In Fortran idxm and idxn should be declared as
1655$ MatStencil idxm(4,m),idxn(4,n)
1656 and the values inserted using
1657$ idxm(MatStencil_i,1) = i
1658$ idxm(MatStencil_j,1) = j
1659$ idxm(MatStencil_k,1) = k
1660 etc
1661
1662   Negative indices may be passed in idxm and idxn; these rows and columns are
1663   simply ignored. This allows easily inserting element stiffness matrices
1664   with homogeneous Dirichlet boundary conditions that you don't want represented
1665   in the matrix.
1666
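   Example of Usage (a minimal C sketch, assuming a matrix obtained with DMCreateMatrix() from a
   2d DMDA with 2 degrees of freedom per node, so each stencil location addresses one 2 x 2 block;
   i and j are hypothetical grid indices owned by this process):
.vb
      MatStencil  row,col;
      PetscScalar v[4];                          /* one bs*bs = 2*2 block, row-oriented */

      row.i = i; row.j = j;
      col.i = i; col.j = j;
      v[0] = 1.0; v[1] = 0.0;
      v[2] = 0.0; v[3] = 1.0;
      MatSetValuesBlockedStencil(mat,1,&row,1,&col,v,INSERT_VALUES);
.ve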
1667 Inspired by the structured grid interface to the HYPRE package
1668 (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1669
1670 Level: beginner
1671
1672.seealso: MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValuesBlocked(), MatSetValuesLocal(),
1673 MatSetValues(), MatSetValuesStencil(), MatSetStencil(), DMCreateMatrix(), DMDAVecGetArray(), MatStencil,
1674 MatSetBlockSize(), MatSetLocalToGlobalMapping()
1675@*/
1676PetscErrorCode MatSetValuesBlockedStencil(Mat mat,PetscInt m,const MatStencil idxm[],PetscInt n,const MatStencil idxn[],const PetscScalar v[],InsertMode addv)
1677{
1678 PetscErrorCode ierr;
1679 PetscInt buf[8192],*bufm=0,*bufn=0,*jdxm,*jdxn;
1680 PetscInt j,i,dim = mat->stencil.dim,*dims = mat->stencil.dims+1,tmp;
1681 PetscInt *starts = mat->stencil.starts,*dxm = (PetscInt*)idxm,*dxn = (PetscInt*)idxn,sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1682
1683  PetscFunctionBegin;
1684  if (!m || !n) PetscFunctionReturn(0); /* no values to insert */
1685  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1686  PetscValidType(mat,1);
1687  PetscValidIntPointer(idxm,3);
1688  PetscValidIntPointer(idxn,5);
1689  PetscValidScalarPointer(v,6);
1690
1691 if ((m+n) <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
1692 jdxm = buf; jdxn = buf+m;
1693 } else {
1694    ierr = PetscMalloc2(m,&bufm,n,&bufn);CHKERRQ(ierr);
1695 jdxm = bufm; jdxn = bufn;
1696 }
1697 for (i=0; i<m; i++) {
1698 for (j=0; j<3-sdim; j++) dxm++;
1699 tmp = *dxm++ - starts[0];
1700 for (j=0; j<sdim-1; j++) {
1701 if ((*dxm++ - starts[j+1]) < 0 || tmp < 0) tmp = -1;
1702 else tmp = tmp*dims[j] + *(dxm-1) - starts[j+1];
1703 }
1704 dxm++;
1705 jdxm[i] = tmp;
1706 }
1707 for (i=0; i<n; i++) {
1708 for (j=0; j<3-sdim; j++) dxn++;
1709 tmp = *dxn++ - starts[0];
1710 for (j=0; j<sdim-1; j++) {
1711 if ((*dxn++ - starts[j+1]) < 0 || tmp < 0) tmp = -1;
1712 else tmp = tmp*dims[j] + *(dxn-1) - starts[j+1];
1713 }
1714 dxn++;
1715 jdxn[i] = tmp;
1716 }
1717  ierr = MatSetValuesBlockedLocal(mat,m,jdxm,n,jdxn,v,addv);CHKERRQ(ierr);
1718  ierr = PetscFree2(bufm,bufn);CHKERRQ(ierr);
1719#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
1720 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
1721 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
1722 }
1723#endif
1724  PetscFunctionReturn(0);
1725}
1726
1727/*@
1728 MatSetStencil - Sets the grid information for setting values into a matrix via
1729 MatSetValuesStencil()
1730
1731 Not Collective
1732
1733 Input Parameters:
1734+ mat - the matrix
1735. dim - dimension of the grid (1, 2, or 3)
1736. dims - number of grid points in x, y, and z direction, including ghost points on your processor
1737. starts - starting point of ghost nodes on your processor in x, y, and z direction
1738- dof - number of degrees of freedom per node
1739
1740
1741   Inspired by the structured grid interface to the HYPRE package
1742   (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1743
1744 For matrices generated with DMCreateMatrix() this routine is automatically called and so not needed by the
1745 user.
1746
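   Example of Usage (a minimal C sketch for a 2d grid; it assumes the ghost corners come from a
   hypothetical DMDA da via DMDAGetGhostCorners(), which returns the starting ghost node and the
   widths, including ghost points, that this routine expects):
.vb
      PetscInt dims[2],starts[2],gxs,gys,gxm,gym;

      DMDAGetGhostCorners(da,&gxs,&gys,NULL,&gxm,&gym,NULL);
      starts[0] = gxs; starts[1] = gys;          /* first ghost node in x and y */
      dims[0]   = gxm; dims[1]   = gym;          /* number of nodes in x and y, with ghosts */
      MatSetStencil(mat,2,dims,starts,1);        /* one degree of freedom per node */
.ve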
1747 Level: beginner
1748
1749.seealso: MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValuesBlocked(), MatSetValuesLocal(),
1750 MatSetValues(), MatSetValuesBlockedStencil(), MatSetValuesStencil()
1751@*/
1752PetscErrorCode MatSetStencil(Mat mat,PetscInt dim,const PetscInt dims[],const PetscInt starts[],PetscInt dof)
1753{
1754 PetscInt i;
1755
1756  PetscFunctionBegin;
1757  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1758  PetscValidIntPointer(dims,3);
1759  PetscValidIntPointer(starts,4);
1760
1761 mat->stencil.dim = dim + (dof > 1);
1762 for (i=0; i<dim; i++) {
1763 mat->stencil.dims[i] = dims[dim-i-1]; /* copy the values in backwards */
1764 mat->stencil.starts[i] = starts[dim-i-1];
1765 }
1766 mat->stencil.dims[dim] = dof;
1767 mat->stencil.starts[dim] = 0;
1768 mat->stencil.noc = (PetscBool)(dof == 1);
1769  PetscFunctionReturn(0);
1770}
1771
1772/*@C
1773 MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
1774
1775 Not Collective
1776
1777 Input Parameters:
1778+ mat - the matrix
1779. v - a logically two-dimensional array of values
1780. m, idxm - the number of block rows and their global block indices
1781. n, idxn - the number of block columns and their global block indices
1782- addv - either ADD_VALUES or INSERT_VALUES, where
1783 ADD_VALUES adds values to any existing entries, and
1784 INSERT_VALUES replaces existing entries with new values
1785
1786 Notes:
1787   If you create the matrix yourself (that is, not with a call to DMCreateMatrix()) then you MUST call
1788 MatXXXXSetPreallocation() or MatSetUp() before using this routine.
1789
1790 The m and n count the NUMBER of blocks in the row direction and column direction,
1791 NOT the total number of rows/columns; for example, if the block size is 2 and
1792 you are passing in values for rows 2,3,4,5 then m would be 2 (not 4).
1793   The values in idxm would be 1 and 2; that is, the first index of each block divided by
1794 the block size.
1795
1796 Note that you must call MatSetBlockSize() when constructing this matrix (before
1797 preallocating it).
1798
1799 By default the values, v, are row-oriented, so the layout of
1800 v is the same as for MatSetValues(). See MatSetOption() for other options.
1801
1802 Calls to MatSetValuesBlocked() with the INSERT_VALUES and ADD_VALUES
1803 options cannot be mixed without intervening calls to the assembly
1804 routines.
1805
1806 MatSetValuesBlocked() uses 0-based row and column numbers in Fortran
1807 as well as in C.
1808
1809   Negative indices may be passed in idxm and idxn; these rows and columns are
1810   simply ignored. This allows easily inserting element stiffness matrices
1811   with homogeneous Dirichlet boundary conditions that you don't want represented
1812   in the matrix.
1813
1814 Each time an entry is set within a sparse matrix via MatSetValues(),
1815 internal searching must be done to determine where to place the
1816 data in the matrix storage space. By instead inserting blocks of
1817 entries via MatSetValuesBlocked(), the overhead of matrix assembly is
1818 reduced.
1819
1820 Example:
1821$   Suppose m=n=2 and block size (bs) = 2. The array is
1822$
1823$ 1 2 | 3 4
1824$ 5 6 | 7 8
1825$ - - - | - - -
1826$ 9 10 | 11 12
1827$ 13 14 | 15 16
1828$
1829$ v[] should be passed in like
1830$ v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
1831$
1832$ If you are not using row oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
1833$ v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
1834
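   A call matching the example above might look like the following sketch (block rows/columns
   1 and 2 correspond to point rows/columns 2,3 and 4,5 for bs = 2; error checking omitted):
.vb
      PetscInt    idxm[2] = {1,2},idxn[2] = {1,2};
      PetscScalar v[16]   = {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16};

      MatSetValuesBlocked(mat,2,idxm,2,idxn,v,INSERT_VALUES);
.ve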
1835 Level: intermediate
1836
1837.seealso: MatSetBlockSize(), MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValues(), MatSetValuesBlockedLocal()
1838@*/
1839PetscErrorCode MatSetValuesBlocked(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],const PetscScalar v[],InsertMode addv)
1840{
1841 PetscErrorCode ierr;
1842
1843  PetscFunctionBeginHot;
1844  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1845  PetscValidType(mat,1);
1846  if (!m || !n) PetscFunctionReturn(0); /* no values to insert */
1847  PetscValidIntPointer(idxm,3);
1848  PetscValidIntPointer(idxn,5);
1849  PetscValidScalarPointer(v,6);
1850  MatCheckPreallocated(mat,1);
1851 if (mat->insertmode == NOT_SET_VALUES) {
1852 mat->insertmode = addv;
1853 }
1854#if defined(PETSC_USE_DEBUG)
1855  else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
1856  if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
1857  if (!mat->ops->setvaluesblocked && !mat->ops->setvalues) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
1858#endif
1859
1860 if (mat->assembled) {
1861 mat->was_assembled = PETSC_TRUE;
1862 mat->assembled = PETSC_FALSE;
1863 }
1864  ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
1865 if (mat->ops->setvaluesblocked) {
1866    ierr = (*mat->ops->setvaluesblocked)(mat,m,idxm,n,idxn,v,addv);CHKERRQ(ierr);
1867 } else {
1868 PetscInt buf[8192],*bufr=0,*bufc=0,*iidxm,*iidxn;
1869 PetscInt i,j,bs,cbs;
1870    ierr = MatGetBlockSizes(mat,&bs,&cbs);CHKERRQ(ierr);
1871 if (m*bs+n*cbs <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
1872 iidxm = buf; iidxn = buf + m*bs;
1873 } else {
1874      ierr = PetscMalloc2(m*bs,&bufr,n*cbs,&bufc);CHKERRQ(ierr);
1875 iidxm = bufr; iidxn = bufc;
1876 }
1877 for (i=0; i<m; i++) {
1878 for (j=0; j<bs; j++) {
1879 iidxm[i*bs+j] = bs*idxm[i] + j;
1880 }
1881 }
1882 for (i=0; i<n; i++) {
1883 for (j=0; j<cbs; j++) {
1884 iidxn[i*cbs+j] = cbs*idxn[i] + j;
1885 }
1886 }
1887    ierr = MatSetValues(mat,m*bs,iidxm,n*cbs,iidxn,v,addv);CHKERRQ(ierr);
1888    ierr = PetscFree2(bufr,bufc);CHKERRQ(ierr);
1889 }
1890  ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
1891#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
1892 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
1893 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
1894 }
1895#endif
1896  PetscFunctionReturn(0);
1897}
1898
1899/*@
1900 MatGetValues - Gets a block of values from a matrix.
1901
1902 Not Collective; currently only returns a local block
1903
1904 Input Parameters:
1905+ mat - the matrix
1906. v - a logically two-dimensional array for storing the values
1907. m, idxm - the number of rows and their global indices
1908- n, idxn - the number of columns and their global indices
1909
1910 Notes:
1911 The user must allocate space (m*n PetscScalars) for the values, v.
1912 The values, v, are then returned in a row-oriented format,
1913 analogous to that used by default in MatSetValues().
1914
1915 MatGetValues() uses 0-based row and column numbers in
1916 Fortran as well as in C.
1917
1918 MatGetValues() requires that the matrix has been assembled
1919 with MatAssemblyBegin()/MatAssemblyEnd(). Thus, calls to
1920 MatSetValues() and MatGetValues() CANNOT be made in succession
1921 without intermediate matrix assembly.
1922
1923 Negative row or column indices will be ignored and those locations in v[] will be
1924 left unchanged.
1925
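   Example of Usage (a minimal C sketch retrieving a 2 x 2 block from an assembled matrix; the
   global indices are illustrative and must lie in the locally owned rows):
.vb
      PetscInt    idxm[2] = {0,1},idxn[2] = {0,1};
      PetscScalar v[4];                          /* room for m*n values, returned row-oriented */

      MatGetValues(mat,2,idxm,2,idxn,v);         /* v[0]=A(0,0), v[1]=A(0,1), v[2]=A(1,0), ... */
.ve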
1926 Level: advanced
1927
1928.seealso: MatGetRow(), MatCreateSubMatrices(), MatSetValues()
1929@*/
1930PetscErrorCode MatGetValues(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
1931{
1932 PetscErrorCode ierr;
1933
1934  PetscFunctionBegin;
1935  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1936  PetscValidType(mat,1);
1937  if (!m || !n) PetscFunctionReturn(0);
1938  PetscValidIntPointer(idxm,3);
1939  PetscValidIntPointer(idxn,5);
1940  PetscValidScalarPointer(v,6);
1941  if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
1942  if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
1943  if (!mat->ops->getvalues) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
1944  MatCheckPreallocated(mat,1);
1945
1946  ierr = PetscLogEventBegin(MAT_GetValues,mat,0,0,0);CHKERRQ(ierr);
1947  ierr = (*mat->ops->getvalues)(mat,m,idxm,n,idxn,v);CHKERRQ(ierr);
1948  ierr = PetscLogEventEnd(MAT_GetValues,mat,0,0,0);CHKERRQ(ierr);
1949  PetscFunctionReturn(0);
1950}
1951
1952/*@
1953 MatSetValuesBatch - Adds (ADD_VALUES) many blocks of values into a matrix at once. The blocks must all be square and
1954 the same size. Currently, this can only be called once and creates the given matrix.
1955
1956 Not Collective
1957
1958 Input Parameters:
1959+ mat - the matrix
1960. nb - the number of blocks
1961. bs - the number of rows (and columns) in each block
1962. rows - a concatenation of the rows for each block
1963- v - a concatenation of logically two-dimensional arrays of values
1964
1965 Notes:
1966 In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
1967
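   Example of Usage (a minimal C sketch adding two square 2 x 2 blocks, as in element-by-element
   finite element assembly; the row numbers and values are illustrative, and mat is assumed ready
   to accept values):
.vb
      PetscInt    rows[4] = {0,1, 1,2};             /* rows of block 0, then rows of block 1 */
      PetscScalar v[8]    = {1,-1,-1,1, 1,-1,-1,1}; /* nb*bs*bs values, one block after another */

      MatSetValuesBatch(mat,2,2,rows,v);
.ve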
1968 Level: advanced
1969
1970.seealso: MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValuesBlocked(), MatSetValuesLocal(),
1971 InsertMode, INSERT_VALUES, ADD_VALUES, MatSetValues()
1972@*/
1973PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
1974{
1975 PetscErrorCode ierr;
1976
1977  PetscFunctionBegin;
1978  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1979  PetscValidType(mat,1);
1980  PetscValidScalarPointer(rows,4);
1981  PetscValidScalarPointer(v,5);
1982#if defined(PETSC_USE_DEBUG)
1983  if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
1984#endif
1985
1986  ierr = PetscLogEventBegin(MAT_SetValuesBatch,mat,0,0,0);CHKERRQ(ierr);
1987 if (mat->ops->setvaluesbatch) {
1988    ierr = (*mat->ops->setvaluesbatch)(mat,nb,bs,rows,v);CHKERRQ(ierr);
1989 } else {
1990 PetscInt b;
1991 for (b = 0; b < nb; ++b) {
1992      ierr = MatSetValues(mat, bs, &rows[b*bs], bs, &rows[b*bs], &v[b*bs*bs], ADD_VALUES);CHKERRQ(ierr);
1993 }
1994 }
1995  ierr = PetscLogEventEnd(MAT_SetValuesBatch,mat,0,0,0);CHKERRQ(ierr);
1996  PetscFunctionReturn(0);
1997}
1998
1999/*@
2000 MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2001 the routine MatSetValuesLocal() to allow users to insert matrix entries
2002 using a local (per-processor) numbering.
2003
2004 Not Collective
2005
2006 Input Parameters:
2007+ x - the matrix
2008. rmapping - row mapping created with ISLocalToGlobalMappingCreate() or ISLocalToGlobalMappingCreateIS()
2009- cmapping - column mapping
2010
2011 Level: intermediate
2012
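   Example of Usage (a minimal C sketch for a square matrix whose rows and columns share one
   mapping; n and the global index array globalidx are hypothetical):
.vb
      ISLocalToGlobalMapping ltog;

      ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,1,n,globalidx,PETSC_COPY_VALUES,&ltog);
      MatSetLocalToGlobalMapping(x,ltog,ltog);
      ISLocalToGlobalMappingDestroy(&ltog);
.ve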
2013
2014.seealso: MatAssemblyBegin(), MatAssemblyEnd(), MatSetValues(), MatSetValuesLocal()
2015@*/
2016PetscErrorCode MatSetLocalToGlobalMapping(Mat x,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
2017{
2018 PetscErrorCode ierr;
2019
2020  PetscFunctionBegin;
2021  PetscValidHeaderSpecific(x,MAT_CLASSID,1);
2022  PetscValidType(x,1);
2023  PetscValidHeaderSpecific(rmapping,IS_LTOGM_CLASSID,2);
2024  PetscValidHeaderSpecific(cmapping,IS_LTOGM_CLASSID,3);
2025
2026 if (x->ops->setlocaltoglobalmapping) {
2027    ierr = (*x->ops->setlocaltoglobalmapping)(x,rmapping,cmapping);CHKERRQ(ierr);
2028 } else {
2029    ierr = PetscLayoutSetISLocalToGlobalMapping(x->rmap,rmapping);CHKERRQ(ierr);
2030    ierr = PetscLayoutSetISLocalToGlobalMapping(x->cmap,cmapping);CHKERRQ(ierr);
2031 }
2032  PetscFunctionReturn(0);
2033}
2034
2035
2036/*@
2037 MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by MatSetLocalToGlobalMapping()
2038
2039 Not Collective
2040
2041 Input Parameters:
2042. A - the matrix
2043
2044 Output Parameters:
2045+ rmapping - row mapping
2046- cmapping - column mapping
2047
2048 Level: advanced
2049
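   Example of Usage (a minimal C sketch translating two local row indices to global indices
   with the retrieved row mapping; pass NULL for a mapping you do not need):
.vb
      ISLocalToGlobalMapping rmapping;
      PetscInt               lidx[2] = {0,1},gidx[2];

      MatGetLocalToGlobalMapping(A,&rmapping,NULL);
      ISLocalToGlobalMappingApply(rmapping,2,lidx,gidx);
.ve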
2050
2051.seealso: MatSetValuesLocal()
2052@*/
2053PetscErrorCode MatGetLocalToGlobalMapping(Mat A,ISLocalToGlobalMapping *rmapping,ISLocalToGlobalMapping *cmapping)
2054{
2055  PetscFunctionBegin;
2056  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
2057  PetscValidType(A,1);
2058  if (rmapping) PetscValidPointer(rmapping,2);
2059  if (cmapping) PetscValidPointer(cmapping,3);
2060 if (rmapping) *rmapping = A->rmap->mapping;
2061 if (cmapping) *cmapping = A->cmap->mapping;
2062  PetscFunctionReturn(0);
2063}
2064
2065/*@
2066 MatGetLayouts - Gets the PetscLayout objects for rows and columns
2067
2068 Not Collective
2069
2070 Input Parameters:
2071. A - the matrix
2072
2073 Output Parameters:
2074+ rmap - row layout
2075- cmap - column layout
2076
2077 Level: advanced
2078
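   Example of Usage (a minimal C sketch querying the ownership range of the rows through the
   row layout; pass NULL for a layout you do not need):
.vb
      PetscLayout rmap;
      PetscInt    rstart,rend;

      MatGetLayouts(A,&rmap,NULL);
      PetscLayoutGetRange(rmap,&rstart,&rend);   /* first and one-past-last local row */
.ve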
2079.seealso: MatCreateVecs(), MatGetLocalToGlobalMapping()
2080@*/
2081PetscErrorCode MatGetLayouts(Mat A,PetscLayout *rmap,PetscLayout *cmap)
2082{
2083  PetscFunctionBegin;
2084  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
2085  PetscValidType(A,1);
2086  if (rmap) PetscValidPointer(rmap,2);
2087  if (cmap) PetscValidPointer(cmap,3);
2088 if (rmap) *rmap = A->rmap;
2089 if (cmap) *cmap = A->cmap;
2090 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2091}
2092
2093/*@C
2094 MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2095 using a local ordering of the nodes.
2096
2097 Not Collective
2098
2099 Input Parameters:
2100+ mat - the matrix
2101. nrow, irow - number of rows and their local indices
2102. ncol, icol - number of columns and their local indices
2103. y - a logically two-dimensional array of values
2104- addv - either INSERT_VALUES or ADD_VALUES, where
2105 ADD_VALUES adds values to any existing entries, and
2106 INSERT_VALUES replaces existing entries with new values
2107
2108 Notes:
2109 If you create the matrix yourself (that is, not with a call to DMCreateMatrix()) then you MUST call MatXXXXSetPreallocation() or
2110 MatSetUp() before using this routine.
2111
2112 If you create the matrix yourself (that is, not with a call to DMCreateMatrix()) then you MUST call MatSetLocalToGlobalMapping() before using this routine.
2113
2114 Calls to MatSetValuesLocal() with the INSERT_VALUES and ADD_VALUES
2115 options cannot be mixed without intervening calls to the assembly
2116 routines.
2117
2118 These values may be cached, so MatAssemblyBegin() and MatAssemblyEnd()
2119 MUST be called after all calls to MatSetValuesLocal() have been completed.
2120
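 Example of Usage:
 A minimal sketch (mat is assumed preallocated and given a local-to-global
 mapping elsewhere; error checking is elided) that inserts one local row with
 two entries and then assembles:
.vb
 PetscInt    row = 0, cols[2] = {0,1};
 PetscScalar vals[2] = {2.0,-1.0};
 MatSetValuesLocal(mat,1,&row,2,cols,vals,INSERT_VALUES);
 MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
 MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
.ve
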
2121 Level: intermediate
2122
2123 Developer Notes:
2124 This is labeled with C so does not automatically generate Fortran stubs and interfaces
2125 because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2126
2127.seealso: MatAssemblyBegin(), MatAssemblyEnd(), MatSetValues(), MatSetLocalToGlobalMapping(),
2128 MatSetValueLocal()
2129@*/
2130PetscErrorCode MatSetValuesLocal(Mat mat,PetscInt nrow,const PetscInt irow[],PetscInt ncol,const PetscInt icol[],const PetscScalar y[],InsertMode addv)
2131{
2132 PetscErrorCode ierr;
2133
2134 PetscFunctionBeginHotdo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2134; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_TRUE || petscstack->hotdepth); } ; } while (0);
; } while (0)
;
2135 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2135,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2135,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2135,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2135,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2136 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2136,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2137 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),2137,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
2138 if (!nrow || !ncol) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
; /* no values to insert */
2139 PetscValidIntPointer(irow,3)do { if (!irow) return PetscError(((MPI_Comm)0x44000001),2139
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(irow,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),2139,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,3); } while (0)
;
2140 PetscValidIntPointer(icol,5)do { if (!icol) return PetscError(((MPI_Comm)0x44000001),2140
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",5); if
(!PetscCheckPointer(icol,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),2140,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,5); } while (0)
;
2141 if (mat->insertmode == NOT_SET_VALUES) {
2142 mat->insertmode = addv;
2143 }
2144#if defined(PETSC_USE_DEBUG1)
2145 else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values")return PetscError(((MPI_Comm)0x44000001),2145,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Cannot mix add values and insert values"
)
;
2146 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(((MPI_Comm)0x44000001),2146,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Not for factored matrix")
;
2147 if (!mat->ops->setvalueslocal && !mat->ops->setvalues) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(((MPI_Comm)0x44000001),2147,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,56,PETSC_ERROR_INITIAL,"Mat type %s",((PetscObject)mat)->
type_name)
;
2148#endif
2149
2150 if (mat->assembled) {
2151 mat->was_assembled = PETSC_TRUE;
2152 mat->assembled = PETSC_FALSE;
2153 }
2154 ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_SetValues].active) ? (*PetscLogPLB)((MAT_SetValues),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2154,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2155 if (mat->ops->setvalueslocal) {
2156 ierr = (*mat->ops->setvalueslocal)(mat,nrow,irow,ncol,icol,y,addv);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2156,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2157 } else {
2158 PetscInt buf[8192],*bufr=0,*bufc=0,*irowm,*icolm;
2159 if ((nrow+ncol) <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
2160 irowm = buf; icolm = buf+nrow;
2161 } else {
2162 ierr = PetscMalloc2(nrow,&bufr,ncol,&bufc)PetscMallocA(2,PETSC_FALSE,2162,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,(size_t)(nrow)*sizeof(**(&bufr)),(&bufr),(size_t)(ncol
)*sizeof(**(&bufc)),(&bufc))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2162,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2163 irowm = bufr; icolm = bufc;
2164 }
2165 ierr = ISLocalToGlobalMappingApply(mat->rmap->mapping,nrow,irow,irowm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2165,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2166 ierr = ISLocalToGlobalMappingApply(mat->cmap->mapping,ncol,icol,icolm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2166,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2167 ierr = MatSetValues(mat,nrow,irowm,ncol,icolm,y,addv);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2167,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2168 ierr = PetscFree2(bufr,bufc)PetscFreeA(2,2168,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,&(bufr),&(bufc))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2168,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2169 }
2170 ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_SetValues].active) ? (*PetscLogPLE)((MAT_SetValues),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2170,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2171#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
2172 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
2173 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
2174 }
2175#endif
2176 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2177}
2178
2179/*@C
2180 MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2181 using a local ordering of the nodes a block at a time.
2182
2183 Not Collective
2184
2185 Input Parameters:
2186+ mat - the matrix
2187. nrow, irow - number of rows and their local indices
2188. ncol, icol - number of columns and their local indices
2189. y - a logically two-dimensional array of values
2190- addv - either INSERT_VALUES or ADD_VALUES, where
2191 ADD_VALUES adds values to any existing entries, and
2192 INSERT_VALUES replaces existing entries with new values
2193
2194 Notes:
2195 If you create the matrix yourself (that is, not with a call to DMCreateMatrix()) then you MUST call MatXXXXSetPreallocation() or
2196 MatSetUp() before using this routine.
2197
2198 If you create the matrix yourself (that is, not with a call to DMCreateMatrix()) then you MUST call MatSetBlockSize() and MatSetLocalToGlobalMapping()
2199 before using this routine.
2200
2201 Calls to MatSetValuesBlockedLocal() with the INSERT_VALUES and ADD_VALUES
2202 options cannot be mixed without intervening calls to the assembly
2203 routines.
2204
2205 These values may be cached, so MatAssemblyBegin() and MatAssemblyEnd()
2206 MUST be called after all calls to MatSetValuesBlockedLocal() have been completed.
2207
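 Example of Usage:
 A minimal sketch (mat is assumed to have block size 2 and a local-to-global
 mapping set elsewhere; error checking is elided) that adds one 2x2 block,
 stored in row-major order, at block-local position (0,0):
.vb
 PetscInt    brow = 0, bcol = 0;
 PetscScalar v[4] = {4.0,-1.0,-1.0,4.0};
 MatSetValuesBlockedLocal(mat,1,&brow,1,&bcol,v,ADD_VALUES);
 MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
 MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
.ve
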
2208 Level: intermediate
2209
2210 Developer Notes:
2211 This is labeled with C so does not automatically generate Fortran stubs and interfaces
2212 because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2213
2214.seealso: MatSetBlockSize(), MatSetLocalToGlobalMapping(), MatAssemblyBegin(), MatAssemblyEnd(),
2215 MatSetValuesLocal(), MatSetValuesBlocked()
2216@*/
2217PetscErrorCode MatSetValuesBlockedLocal(Mat mat,PetscInt nrow,const PetscInt irow[],PetscInt ncol,const PetscInt icol[],const PetscScalar y[],InsertMode addv)
2218{
2219 PetscErrorCode ierr;
2220
2221 PetscFunctionBeginHotdo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2221; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_TRUE || petscstack->hotdepth); } ; } while (0);
; } while (0)
;
2222 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2222,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2222,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2222,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2222,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2223 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2223,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2224 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),2224,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
2225 if (!nrow || !ncol) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
; /* no values to insert */
2226 PetscValidIntPointer(irow,3)do { if (!irow) return PetscError(((MPI_Comm)0x44000001),2226
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(irow,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),2226,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,3); } while (0)
;
2227 PetscValidIntPointer(icol,5)do { if (!icol) return PetscError(((MPI_Comm)0x44000001),2227
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",5); if
(!PetscCheckPointer(icol,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),2227,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,5); } while (0)
;
2228 PetscValidScalarPointer(y,6)do { if (!y) return PetscError(((MPI_Comm)0x44000001),2228,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",6); if (!PetscCheckPointer(y,
PETSC_DOUBLE)) return PetscError(((MPI_Comm)0x44000001),2228,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscScalar: Parameter # %d"
,6); } while (0)
;
2229 if (mat->insertmode == NOT_SET_VALUES) {
2230 mat->insertmode = addv;
2231 }
2232#if defined(PETSC_USE_DEBUG1)
2233 else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values")return PetscError(((MPI_Comm)0x44000001),2233,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Cannot mix add values and insert values"
)
;
2234 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(((MPI_Comm)0x44000001),2234,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Not for factored matrix")
;
2235 if (!mat->ops->setvaluesblockedlocal && !mat->ops->setvaluesblocked && !mat->ops->setvalueslocal && !mat->ops->setvalues) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(((MPI_Comm)0x44000001),2235,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,56,PETSC_ERROR_INITIAL,"Mat type %s",((PetscObject)mat)->
type_name)
;
2236#endif
2237
2238 if (mat->assembled) {
2239 mat->was_assembled = PETSC_TRUE;
2240 mat->assembled = PETSC_FALSE;
2241 }
2242#if defined(PETSC_USE_DEBUG1)
2243 /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2244 if (mat->rmap->mapping) {
2245 PetscInt irbs, rbs;
2246 ierr = MatGetBlockSizes(mat, &rbs, NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2246,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2247 ierr = ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping,&irbs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2247,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2248 if (rbs != irbs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Different row block sizes! mat %D, row l2g map %D",rbs,irbs)return PetscError(PetscObjectComm((PetscObject)mat),2248,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Different row block sizes! mat %D, row l2g map %D",rbs,irbs
)
;
2249 }
2250 if (mat->cmap->mapping) {
2251 PetscInt icbs, cbs;
2252 ierr = MatGetBlockSizes(mat,NULL((void*)0),&cbs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2252,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2253 ierr = ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping,&icbs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2253,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2254 if (cbs != icbs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Different col block sizes! mat %D, col l2g map %D",cbs,icbs)return PetscError(PetscObjectComm((PetscObject)mat),2254,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Different col block sizes! mat %D, col l2g map %D",cbs,icbs
)
;
2255 }
2256#endif
2257 ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_SetValues].active) ? (*PetscLogPLB)((MAT_SetValues),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2257,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2258 if (mat->ops->setvaluesblockedlocal) {
2259 ierr = (*mat->ops->setvaluesblockedlocal)(mat,nrow,irow,ncol,icol,y,addv);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2259,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2260 } else {
2261 PetscInt buf[8192],*bufr=0,*bufc=0,*irowm,*icolm;
2262 if ((nrow+ncol) <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
2263 irowm = buf; icolm = buf + nrow;
2264 } else {
2265 ierr = PetscMalloc2(nrow,&bufr,ncol,&bufc)PetscMallocA(2,PETSC_FALSE,2265,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,(size_t)(nrow)*sizeof(**(&bufr)),(&bufr),(size_t)(ncol
)*sizeof(**(&bufc)),(&bufc))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2265,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2266 irowm = bufr; icolm = bufc;
2267 }
2268 ierr = ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping,nrow,irow,irowm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2268,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2269 ierr = ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping,ncol,icol,icolm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2269,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2270 ierr = MatSetValuesBlocked(mat,nrow,irowm,ncol,icolm,y,addv);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2270,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2271 ierr = PetscFree2(bufr,bufc)PetscFreeA(2,2271,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,&(bufr),&(bufc))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2271,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2272 }
2273 ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_SetValues].active) ? (*PetscLogPLE)((MAT_SetValues),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2273,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2274#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
2275 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
2276 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
2277 }
2278#endif
2279 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2280}
2281
2282/*@
2283 MatMultDiagonalBlock - Computes the matrix-vector product, y = Dx, where D is defined by the inode or block structure of the diagonal
2284
2285 Collective on Mat
2286
2287 Input Parameters:
2288+ mat - the matrix
2289- x - the vector to be multiplied
2290
2291 Output Parameters:
2292. y - the result
2293
2294 Notes:
2295 The vectors x and y cannot be the same. I.e., one cannot
2296 call MatMultDiagonalBlock(A,y,y).
2297
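 Example of Usage:
 A minimal sketch (mat is assumed assembled elsewhere and of a type that
 provides this operation; error checking is elided):
.vb
 Vec x,y;
 MatCreateVecs(mat,&x,&y);
 VecSet(x,1.0);
 MatMultDiagonalBlock(mat,x,y); /* y = D*x using only the diagonal block(s) */
 VecDestroy(&x); VecDestroy(&y);
.ve
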
2298 Level: developer
2299
2300.seealso: MatMultTranspose(), MatMultAdd(), MatMultTransposeAdd()
2301@*/
2302PetscErrorCode MatMultDiagonalBlock(Mat mat,Vec x,Vec y)
2303{
2304 PetscErrorCode ierr;
2305
2306 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2306; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2307 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2307,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2307,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2307,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2307,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2308 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2308,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2309 PetscValidHeaderSpecific(x,VEC_CLASSID,2)do { if (!x) return PetscError(((MPI_Comm)0x44000001),2309,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),2309,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),2309,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),2309,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
2310 PetscValidHeaderSpecific(y,VEC_CLASSID,3)do { if (!y) return PetscError(((MPI_Comm)0x44000001),2310,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(y,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),2310,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(y))->classid != VEC_CLASSID) { if (
((PetscObject)(y))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),2310,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),2310,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
2311
2312 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),2312,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
2313 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),2313,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
2314 if (x == y) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),2314,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"x and y must be different vectors")
;
2315 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),2315,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
2316
2317 if (!mat->ops->multdiagonalblock) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"This matrix type does not have a multiply defined")return PetscError(PetscObjectComm((PetscObject)mat),2317,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"This matrix type does not have a multiply defined")
;
2318 ierr = (*mat->ops->multdiagonalblock)(mat,x,y);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2318,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2319 ierr = PetscObjectStateIncrease((PetscObject)y)(((PetscObject)y)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2319,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2320 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2321}
2322
2323/* --------------------------------------------------------*/
2324/*@
2325 MatMult - Computes the matrix-vector product, y = Ax.
2326
2327 Neighbor-wise Collective on Mat
2328
2329 Input Parameters:
2330+ mat - the matrix
2331- x - the vector to be multiplied
2332
2333 Output Parameters:
2334. y - the result
2335
2336 Notes:
2337 The vectors x and y cannot be the same. I.e., one cannot
2338 call MatMult(A,y,y).
2339
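 Example of Usage:
 A minimal sketch (A is assumed assembled elsewhere; error checking is elided):
.vb
 Vec x,y;
 MatCreateVecs(A,&x,&y); /* x conforms to the columns of A, y to its rows */
 VecSet(x,1.0);
 MatMult(A,x,y);         /* y = A*x */
 VecDestroy(&x); VecDestroy(&y);
.ve
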
2340 Level: beginner
2341
2342.seealso: MatMultTranspose(), MatMultAdd(), MatMultTransposeAdd()
2343@*/
2344PetscErrorCode MatMult(Mat mat,Vec x,Vec y)
2345{
2346 PetscErrorCode ierr;
2347
2348 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2348; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2349 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2349,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2349,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2349,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2349,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2350 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2350,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2351 PetscValidHeaderSpecific(x,VEC_CLASSID,2)do { if (!x) return PetscError(((MPI_Comm)0x44000001),2351,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),2351,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),2351,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),2351,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
2352 PetscValidHeaderSpecific(y,VEC_CLASSID,3)do { if (!y) return PetscError(((MPI_Comm)0x44000001),2352,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(y,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),2352,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(y))->classid != VEC_CLASSID) { if (
((PetscObject)(y))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),2352,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),2352,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
2353 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),2353,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
2354 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),2354,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
2355 if (x == y) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),2355,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"x and y must be different vectors")
;
2356#if !defined(PETSC_HAVE_CONSTRAINTS)
2357 if (mat->cmap->N != x->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map->N)return PetscError(((MPI_Comm)0x44000001),2357,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec x: global dim %D %D",mat
->cmap->N,x->map->N)
;
2358 if (mat->rmap->N != y->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %D %D",mat->rmap->N,y->map->N)return PetscError(((MPI_Comm)0x44000001),2358,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec y: global dim %D %D",mat
->rmap->N,y->map->N)
;
2359 if (mat->rmap->n != y->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: local dim %D %D",mat->rmap->n,y->map->n)return PetscError(((MPI_Comm)0x44000001),2359,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec y: local dim %D %D",mat->
rmap->n,y->map->n)
;
2360#endif
2361 ierr = VecSetErrorIfLocked(y,3);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2361,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2362 if (mat->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2362,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
2363 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),2363,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
2364
2365 ierr = VecLockReadPush(x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2365,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2366 if (!mat->ops->mult) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"This matrix type does not have a multiply defined")return PetscError(PetscObjectComm((PetscObject)mat),2366,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"This matrix type does not have a multiply defined")
;
2367 ierr = PetscLogEventBegin(MAT_Mult,mat,x,y,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Mult].active) ? (*PetscLogPLB)((MAT_Mult),0,(PetscObject
)(mat),(PetscObject)(x),(PetscObject)(y),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2367,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2368 ierr = (*mat->ops->mult)(mat,x,y);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2368,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2369 ierr = PetscLogEventEnd(MAT_Mult,mat,x,y,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Mult].active) ? (*PetscLogPLE)((MAT_Mult),0,(PetscObject
)(mat),(PetscObject)(x),(PetscObject)(y),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2369,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2370 if (mat->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2370,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
2371 ierr = VecLockReadPop(x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2371,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2372 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2373}
2374
2375/*@
2376 MatMultTranspose - Computes the matrix transpose times a vector, y = A^T * x.
2377
2378 Neighbor-wise Collective on Mat
2379
2380 Input Parameters:
2381+ mat - the matrix
2382- x - the vector to be multiplied
2383
2384 Output Parameters:
2385. y - the result
2386
2387 Notes:
2388 The vectors x and y cannot be the same. I.e., one cannot
2389 call MatMultTranspose(A,y,y).
2390
2391 For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiply;
2392 use MatMultHermitianTranspose() for that.
2393
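 Example of Usage:
 A minimal sketch (A is assumed assembled elsewhere; error checking is elided):
.vb
 Vec r,l;
 MatCreateVecs(A,&r,&l);  /* r conforms to the columns of A, l to its rows */
 VecSet(l,1.0);
 MatMultTranspose(A,l,r); /* r = A^T * l */
 VecDestroy(&r); VecDestroy(&l);
.ve
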
2394 Level: beginner
2395
2396.seealso: MatMult(), MatMultAdd(), MatMultTransposeAdd(), MatMultHermitianTranspose(), MatTranspose()
2397@*/
2398PetscErrorCode MatMultTranspose(Mat mat,Vec x,Vec y)
2399{
2400 PetscErrorCode ierr;
2401
2402 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2402; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2403 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2403,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2403,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2403,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2403,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2404 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2404,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2405 PetscValidHeaderSpecific(x,VEC_CLASSID,2)do { if (!x) return PetscError(((MPI_Comm)0x44000001),2405,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),2405,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),2405,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),2405,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
2406 PetscValidHeaderSpecific(y,VEC_CLASSID,3)do { if (!y) return PetscError(((MPI_Comm)0x44000001),2406,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(y,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),2406,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(y))->classid != VEC_CLASSID) { if (
((PetscObject)(y))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),2406,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),2406,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
2407
2408 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),2408,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
2409 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),2409,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
2410 if (x == y) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),2410,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"x and y must be different vectors")
;
2411#if !defined(PETSC_HAVE_CONSTRAINTS)
2412 if (mat->rmap->N != x->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->rmap->N,x->map->N)return PetscError(((MPI_Comm)0x44000001),2412,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec x: global dim %D %D",mat
->rmap->N,x->map->N)
;
2413 if (mat->cmap->N != y->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %D %D",mat->cmap->N,y->map->N)return PetscError(((MPI_Comm)0x44000001),2413,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec y: global dim %D %D",mat
->cmap->N,y->map->N)
;
2414#endif
2415 if (mat->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2415,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
2416 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),2416,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
2417
2418 if (!mat->ops->multtranspose) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"This matrix type does not have a multiply transpose defined")return PetscError(PetscObjectComm((PetscObject)mat),2418,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"This matrix type does not have a multiply transpose defined"
)
;
2419 ierr = PetscLogEventBegin(MAT_MultTranspose,mat,x,y,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MultTranspose].active) ? (*PetscLogPLB)((MAT_MultTranspose
),0,(PetscObject)(mat),(PetscObject)(x),(PetscObject)(y),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2419,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2420 ierr = VecLockReadPush(x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2420,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2421 ierr = (*mat->ops->multtranspose)(mat,x,y);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2421,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2422 ierr = VecLockReadPop(x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2422,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2423 ierr = PetscLogEventEnd(MAT_MultTranspose,mat,x,y,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MultTranspose].active) ? (*PetscLogPLE)((MAT_MultTranspose
),0,(PetscObject)(mat),(PetscObject)(x),(PetscObject)(y),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2423,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2424 ierr = PetscObjectStateIncrease((PetscObject)y)(((PetscObject)y)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2424,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2425 if (mat->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2425,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
2426 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2427}
2428
2429/*@
2430 MatMultHermitianTranspose - Computes the matrix Hermitian transpose times a vector, y = A^H * x.
2431
2432 Neighbor-wise Collective on Mat
2433
2434 Input Parameters:
2435+ mat - the matrix
2436- x - the vector to be multiplied
2437
2438 Output Parameters:
2439. y - the result
2440
2441 Notes:
2442 The vectors x and y cannot be the same. I.e., one cannot
2443 call MatMultHermitianTranspose(A,y,y).
2444
2445 Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2446
2447 For real numbers MatMultTranspose() and MatMultHermitianTranspose() are identical.
2448
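 Example of Usage:
 A minimal sketch (A is assumed assembled elsewhere; error checking is elided).
 When the matrix type provides no Hermitian-transpose kernel, the routine falls
 back to computing y = conj(A^T * conj(x)), as seen in the implementation below.
.vb
 Vec r,l;
 MatCreateVecs(A,&r,&l);           /* r conforms to the columns of A, l to its rows */
 VecSet(l,1.0);
 MatMultHermitianTranspose(A,l,r); /* r = A^H * l */
 VecDestroy(&r); VecDestroy(&l);
.ve
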
2449 Level: beginner
2450
2451.seealso: MatMult(), MatMultAdd(), MatMultHermitianTransposeAdd(), MatMultTranspose()
2452@*/
2453PetscErrorCode MatMultHermitianTranspose(Mat mat,Vec x,Vec y)
2454{
2455 PetscErrorCode ierr;
2456 Vec w;
2457
2458 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2458; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2459 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2459,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2459,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2459,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2459,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2460 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2460,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2461 PetscValidHeaderSpecific(x,VEC_CLASSID,2)do { if (!x) return PetscError(((MPI_Comm)0x44000001),2461,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),2461,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),2461,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),2461,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
2462 PetscValidHeaderSpecific(y,VEC_CLASSID,3)do { if (!y) return PetscError(((MPI_Comm)0x44000001),2462,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(y,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),2462,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(y))->classid != VEC_CLASSID) { if (
((PetscObject)(y))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),2462,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),2462,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
2463
2464 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),2464,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
2465 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),2465,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
2466 if (x == y) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),2466,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"x and y must be different vectors")
;
2467#if !defined(PETSC_HAVE_CONSTRAINTS)
2468 if (mat->rmap->N != x->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->rmap->N,x->map->N)return PetscError(((MPI_Comm)0x44000001),2468,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec x: global dim %D %D",mat
->rmap->N,x->map->N)
;
2469 if (mat->cmap->N != y->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %D %D",mat->cmap->N,y->map->N)return PetscError(((MPI_Comm)0x44000001),2469,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec y: global dim %D %D",mat
->cmap->N,y->map->N)
;
2470#endif
2471 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),2471,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
2472
2473 ierr = PetscLogEventBegin(MAT_MultHermitianTranspose,mat,x,y,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MultHermitianTranspose].active) ? (*PetscLogPLB)((MAT_MultHermitianTranspose
),0,(PetscObject)(mat),(PetscObject)(x),(PetscObject)(y),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2473,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2474 if (mat->ops->multhermitiantranspose) {
2475 ierr = VecLockReadPush(x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2475,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2476 ierr = (*mat->ops->multhermitiantranspose)(mat,x,y);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2476,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2477 ierr = VecLockReadPop(x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2477,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2478 } else {
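 /* Fallback when the type provides no native Hermitian-transpose kernel:
 A^H x = conj(A^T conj(x)), so conjugate a copy of x into w, apply
 MatMultTranspose(), then conjugate the result held in y. */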
2479 ierr = VecDuplicate(x,&w);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2479,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2480 ierr = VecCopy(x,w);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2480,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2481 ierr = VecConjugate(w);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2481,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2482 ierr = MatMultTranspose(mat,w,y);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2482,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2483 ierr = VecDestroy(&w);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2483,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2484 ierr = VecConjugate(y);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2484,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2485 }
2486 ierr = PetscLogEventEnd(MAT_MultHermitianTranspose,mat,x,y,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MultHermitianTranspose].active) ? (*PetscLogPLE)((MAT_MultHermitianTranspose
),0,(PetscObject)(mat),(PetscObject)(x),(PetscObject)(y),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2486,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2487 ierr = PetscObjectStateIncrease((PetscObject)y)(((PetscObject)y)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2487,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2488 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2489}
2490
2491/*@
2492 MatMultAdd - Computes v3 = v2 + A * v1.
2493
2494 Neighbor-wise Collective on Mat
2495
2496 Input Parameters:
2497+ mat - the matrix
2498- v1, v2 - the vectors
2499
2500 Output Parameters:
2501. v3 - the result
2502
2503 Notes:
2504 The vectors v1 and v3 cannot be the same. I.e., one cannot
2505 call MatMultAdd(A,v1,v2,v1).
2506
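 Example of Usage (a minimal sketch, assuming A is an assembled Mat and v1, v2, v3
 are conforming Vecs created elsewhere, e.g. with MatCreateVecs()):
.vb
 MatMultAdd(A,v1,v2,v3);   /* v3 = v2 + A*v1 */
.ve
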
2507 Level: beginner
2508
2509.seealso: MatMultTranspose(), MatMult(), MatMultTransposeAdd()
2510@*/
2511PetscErrorCode MatMultAdd(Mat mat,Vec v1,Vec v2,Vec v3)
2512{
2513 PetscErrorCode ierr;
2514
2515 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2515; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2516 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2516,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2516,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2516,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2516,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2517 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2517,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2518 PetscValidHeaderSpecific(v1,VEC_CLASSID,2)do { if (!v1) return PetscError(((MPI_Comm)0x44000001),2518,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(v1,
PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),2518,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(v1))->classid != VEC_CLASSID) { if
(((PetscObject)(v1))->classid == -1) return PetscError(((
MPI_Comm)0x44000001),2518,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),2518,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
2519 PetscValidHeaderSpecific(v2,VEC_CLASSID,3)do { if (!v2) return PetscError(((MPI_Comm)0x44000001),2519,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(v2,
PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),2519,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(v2))->classid != VEC_CLASSID) { if
(((PetscObject)(v2))->classid == -1) return PetscError(((
MPI_Comm)0x44000001),2519,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),2519,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
2520 PetscValidHeaderSpecific(v3,VEC_CLASSID,4)do { if (!v3) return PetscError(((MPI_Comm)0x44000001),2520,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",4); if (!PetscCheckPointer(v3,
PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),2520,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,4); if (((PetscObject)(v3))->classid != VEC_CLASSID) { if
(((PetscObject)(v3))->classid == -1) return PetscError(((
MPI_Comm)0x44000001),2520,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,4); else return PetscError(((MPI_Comm)0x44000001),2520,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",4); } } while (0)
;
2521
2522 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),2522,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
2523 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),2523,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
2524 if (mat->cmap->N != v1->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v1: global dim %D %D",mat->cmap->N,v1->map->N)return PetscError(PetscObjectComm((PetscObject)mat),2524,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec v1: global dim %D %D",mat->cmap->N,v1->
map->N)
;
2525 /* if (mat->rmap->N != v2->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %D %D",mat->rmap->N,v2->map->N);
2526 if (mat->rmap->N != v3->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %D %D",mat->rmap->N,v3->map->N); */
2527 if (mat->rmap->n != v3->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: local dim %D %D",mat->rmap->n,v3->map->n)return PetscError(((MPI_Comm)0x44000001),2527,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec v3: local dim %D %D",mat
->rmap->n,v3->map->n)
;
2528 if (mat->rmap->n != v2->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: local dim %D %D",mat->rmap->n,v2->map->n)return PetscError(((MPI_Comm)0x44000001),2528,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec v2: local dim %D %D",mat
->rmap->n,v2->map->n)
;
2529 if (v1 == v3) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"v1 and v3 must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),2529,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",61,PETSC_ERROR_INITIAL
,"v1 and v3 must be different vectors")
;
2530 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),2530,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
2531
2532 if (!mat->ops->multadd) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No MatMultAdd() for matrix type '%s'",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),2532,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"No MatMultAdd() for matrix type '%s'",((PetscObject)mat)->
type_name)
;
2533 ierr = PetscLogEventBegin(MAT_MultAdd,mat,v1,v2,v3)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MultAdd].active) ? (*PetscLogPLB)((MAT_MultAdd),0,(PetscObject
)(mat),(PetscObject)(v1),(PetscObject)(v2),(PetscObject)(v3))
: 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2533,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2534 ierr = VecLockReadPush(v1);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2534,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2535 ierr = (*mat->ops->multadd)(mat,v1,v2,v3);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2535,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2536 ierr = VecLockReadPop(v1);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2536,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2537 ierr = PetscLogEventEnd(MAT_MultAdd,mat,v1,v2,v3)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MultAdd].active) ? (*PetscLogPLE)((MAT_MultAdd),0,(PetscObject
)(mat),(PetscObject)(v1),(PetscObject)(v2),(PetscObject)(v3))
: 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2537,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2538 ierr = PetscObjectStateIncrease((PetscObject)v3)(((PetscObject)v3)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2538,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2539 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2540}
2541
2542/*@
2543 MatMultTransposeAdd - Computes v3 = v2 + A' * v1.
2544
2545 Neighbor-wise Collective on Mat
2546
2547 Input Parameters:
2548+ mat - the matrix
2549- v1, v2 - the vectors
2550
2551 Output Parameters:
2552. v3 - the result
2553
2554 Notes:
2555 The vectors v1 and v3 cannot be the same. I.e., one cannot
2556 call MatMultTransposeAdd(A,v1,v2,v1).
2557
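 Example of Usage (a minimal sketch, assuming A is an assembled Mat and v1, v2, v3
 are conforming Vecs created elsewhere):
.vb
 MatMultTransposeAdd(A,v1,v2,v3);   /* v3 = v2 + A'*v1 */
.ve
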
2558 Level: beginner
2559
2560.seealso: MatMultTranspose(), MatMultAdd(), MatMult()
2561@*/
2562PetscErrorCode MatMultTransposeAdd(Mat mat,Vec v1,Vec v2,Vec v3)
2563{
2564 PetscErrorCode ierr;
2565
2566 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2566; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2567 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2567,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2567,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2567,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2567,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2568 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2568,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2569 PetscValidHeaderSpecific(v1,VEC_CLASSID,2)do { if (!v1) return PetscError(((MPI_Comm)0x44000001),2569,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(v1,
PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),2569,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(v1))->classid != VEC_CLASSID) { if
(((PetscObject)(v1))->classid == -1) return PetscError(((
MPI_Comm)0x44000001),2569,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),2569,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
2570 PetscValidHeaderSpecific(v2,VEC_CLASSID,3)do { if (!v2) return PetscError(((MPI_Comm)0x44000001),2570,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(v2,
PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),2570,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(v2))->classid != VEC_CLASSID) { if
(((PetscObject)(v2))->classid == -1) return PetscError(((
MPI_Comm)0x44000001),2570,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),2570,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
2571 PetscValidHeaderSpecific(v3,VEC_CLASSID,4)do { if (!v3) return PetscError(((MPI_Comm)0x44000001),2571,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",4); if (!PetscCheckPointer(v3,
PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),2571,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,4); if (((PetscObject)(v3))->classid != VEC_CLASSID) { if
(((PetscObject)(v3))->classid == -1) return PetscError(((
MPI_Comm)0x44000001),2571,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,4); else return PetscError(((MPI_Comm)0x44000001),2571,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",4); } } while (0)
;
2572
2573 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),2573,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
2574 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),2574,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
2575 if (!mat->ops->multtransposeadd) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),2575,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
2576 if (v1 == v3) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"v1 and v3 must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),2576,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",61,PETSC_ERROR_INITIAL
,"v1 and v3 must be different vectors")
;
2577 if (mat->rmap->N != v1->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v1: global dim %D %D",mat->rmap->N,v1->map->N)return PetscError(PetscObjectComm((PetscObject)mat),2577,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec v1: global dim %D %D",mat->rmap->N,v1->
map->N)
;
2578 if (mat->cmap->N != v2->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %D %D",mat->cmap->N,v2->map->N)return PetscError(PetscObjectComm((PetscObject)mat),2578,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec v2: global dim %D %D",mat->cmap->N,v2->
map->N)
;
2579 if (mat->cmap->N != v3->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %D %D",mat->cmap->N,v3->map->N)return PetscError(PetscObjectComm((PetscObject)mat),2579,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec v3: global dim %D %D",mat->cmap->N,v3->
map->N)
;
2580 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),2580,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
2581
2582 ierr = PetscLogEventBegin(MAT_MultTransposeAdd,mat,v1,v2,v3)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MultTransposeAdd].active) ? (*PetscLogPLB)((MAT_MultTransposeAdd
),0,(PetscObject)(mat),(PetscObject)(v1),(PetscObject)(v2),(PetscObject
)(v3)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2582,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2583 ierr = VecLockReadPush(v1);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2583,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2584 ierr = (*mat->ops->multtransposeadd)(mat,v1,v2,v3);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2584,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2585 ierr = VecLockReadPop(v1);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2585,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2586 ierr = PetscLogEventEnd(MAT_MultTransposeAdd,mat,v1,v2,v3)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MultTransposeAdd].active) ? (*PetscLogPLE)((MAT_MultTransposeAdd
),0,(PetscObject)(mat),(PetscObject)(v1),(PetscObject)(v2),(PetscObject
)(v3)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2586,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2587 ierr = PetscObjectStateIncrease((PetscObject)v3)(((PetscObject)v3)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2587,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2588 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2589}
2590
2591/*@
2592 MatMultHermitianTransposeAdd - Computes v3 = v2 + A^H * v1.
2593
2594 Neighbor-wise Collective on Mat
2595
2596 Input Parameters:
2597+ mat - the matrix
2598- v1, v2 - the vectors
2599
2600 Output Parameters:
2601. v3 - the result
2602
2603 Notes:
2604 The vectors v1 and v3 cannot be the same. I.e., one cannot
2605 call MatMultHermitianTransposeAdd(A,v1,v2,v1).
2606
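 Example of Usage (a minimal sketch, assuming A is an assembled Mat, typically with
 complex entries, and v1, v2, v3 are conforming Vecs created elsewhere):
.vb
 MatMultHermitianTransposeAdd(A,v1,v2,v3);   /* v3 = v2 + A^H*v1 */
.ve
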
2607 Level: beginner
2608
2609.seealso: MatMultHermitianTranspose(), MatMultTranspose(), MatMultAdd(), MatMult()
2610@*/
2611PetscErrorCode MatMultHermitianTransposeAdd(Mat mat,Vec v1,Vec v2,Vec v3)
2612{
2613 PetscErrorCode ierr;
2614
2615 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2615; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2616 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2616,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2616,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2616,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2616,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2617 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2617,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2618 PetscValidHeaderSpecific(v1,VEC_CLASSID,2)do { if (!v1) return PetscError(((MPI_Comm)0x44000001),2618,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(v1,
PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),2618,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(v1))->classid != VEC_CLASSID) { if
(((PetscObject)(v1))->classid == -1) return PetscError(((
MPI_Comm)0x44000001),2618,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),2618,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
2619 PetscValidHeaderSpecific(v2,VEC_CLASSID,3)do { if (!v2) return PetscError(((MPI_Comm)0x44000001),2619,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(v2,
PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),2619,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(v2))->classid != VEC_CLASSID) { if
(((PetscObject)(v2))->classid == -1) return PetscError(((
MPI_Comm)0x44000001),2619,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),2619,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
2620 PetscValidHeaderSpecific(v3,VEC_CLASSID,4)do { if (!v3) return PetscError(((MPI_Comm)0x44000001),2620,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",4); if (!PetscCheckPointer(v3,
PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),2620,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,4); if (((PetscObject)(v3))->classid != VEC_CLASSID) { if
(((PetscObject)(v3))->classid == -1) return PetscError(((
MPI_Comm)0x44000001),2620,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,4); else return PetscError(((MPI_Comm)0x44000001),2620,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",4); } } while (0)
;
2621
2622 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),2622,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
2623 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),2623,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
2624 if (v1 == v3) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"v1 and v3 must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),2624,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",61,PETSC_ERROR_INITIAL
,"v1 and v3 must be different vectors")
;
2625 if (mat->rmap->N != v1->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v1: global dim %D %D",mat->rmap->N,v1->map->N)return PetscError(PetscObjectComm((PetscObject)mat),2625,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec v1: global dim %D %D",mat->rmap->N,v1->
map->N)
;
2626 if (mat->cmap->N != v2->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %D %D",mat->cmap->N,v2->map->N)return PetscError(PetscObjectComm((PetscObject)mat),2626,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec v2: global dim %D %D",mat->cmap->N,v2->
map->N)
;
2627 if (mat->cmap->N != v3->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %D %D",mat->cmap->N,v3->map->N)return PetscError(PetscObjectComm((PetscObject)mat),2627,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec v3: global dim %D %D",mat->cmap->N,v3->
map->N)
;
2628 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),2628,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
2629
2630 ierr = PetscLogEventBegin(MAT_MultHermitianTransposeAdd,mat,v1,v2,v3)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MultHermitianTransposeAdd].active) ? (*PetscLogPLB)((MAT_MultHermitianTransposeAdd
),0,(PetscObject)(mat),(PetscObject)(v1),(PetscObject)(v2),(PetscObject
)(v3)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2630,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2631 ierr = VecLockReadPush(v1);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2631,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2632 if (mat->ops->multhermitiantransposeadd) {
2633 ierr = (*mat->ops->multhermitiantransposeadd)(mat,v1,v2,v3);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2633,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2634 } else {
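 /* Fallback when the type provides no native kernel: v3 = v2 + conj(A^T conj(v1)).
 Conjugate a copy of v1 into w, form z = A^T w, conjugate z, then add v2
 (with VecWAXPY(), or VecAXPY() when v2 and v3 are the same vector). */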
2635 Vec w,z;
2636 ierr = VecDuplicate(v1,&w);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2636,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2637 ierr = VecCopy(v1,w);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2637,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2638 ierr = VecConjugate(w);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2638,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2639 ierr = VecDuplicate(v3,&z);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2639,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2640 ierr = MatMultTranspose(mat,w,z);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2640,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2641 ierr = VecDestroy(&w);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2641,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2642 ierr = VecConjugate(z);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2642,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2643 if (v2 != v3) {
2644 ierr = VecWAXPY(v3,1.0,v2,z);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2644,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2645 } else {
2646 ierr = VecAXPY(v3,1.0,z);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2646,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2647 }
2648 ierr = VecDestroy(&z);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2648,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2649 }
2650 ierr = VecLockReadPop(v1);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2650,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2651 ierr = PetscLogEventEnd(MAT_MultHermitianTransposeAdd,mat,v1,v2,v3)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MultHermitianTransposeAdd].active) ? (*PetscLogPLE)((MAT_MultHermitianTransposeAdd
),0,(PetscObject)(mat),(PetscObject)(v1),(PetscObject)(v2),(PetscObject
)(v3)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2651,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2652 ierr = PetscObjectStateIncrease((PetscObject)v3)(((PetscObject)v3)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2652,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2653 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2654}
2655
2656/*@
2657 MatMultConstrained - The inner multiplication routine for a
2658 constrained matrix P^T A P.
2659
2660 Neighbor-wise Collective on Mat
2661
2662 Input Parameters:
2663+ mat - the matrix
2664- x - the vector to be multiplied
2665
2666 Output Parameters:
2667. y - the result
2668
2669 Notes:
2670 The vectors x and y cannot be the same. I.e., one cannot
2671 call MatMultConstrained(A,y,y).
2672
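 Example of Usage (a minimal sketch, assuming A is an assembled Mat whose type
 implements a constrained multiply, and x, y are conforming Vecs):
.vb
 MatMultConstrained(A,x,y);   /* y = (P^T A P) x */
.ve
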
2673 Level: beginner
2674
2675.seealso: MatMult(), MatMultTranspose(), MatMultAdd(), MatMultTransposeAdd()
2676@*/
2677PetscErrorCode MatMultConstrained(Mat mat,Vec x,Vec y)
2678{
2679 PetscErrorCode ierr;
2680
2681 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2681; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2682 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2682,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2682,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2682,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2682,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2683 PetscValidHeaderSpecific(x,VEC_CLASSID,2)do { if (!x) return PetscError(((MPI_Comm)0x44000001),2683,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),2683,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),2683,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),2683,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
2684 PetscValidHeaderSpecific(y,VEC_CLASSID,3)do { if (!y) return PetscError(((MPI_Comm)0x44000001),2684,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(y,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),2684,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(y))->classid != VEC_CLASSID) { if (
((PetscObject)(y))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),2684,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),2684,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
2685 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(((MPI_Comm)0x44000001),2685,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Not for unassembled matrix")
;
2686 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(((MPI_Comm)0x44000001),2686,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Not for factored matrix")
;
2687 if (x == y) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors")return PetscError(((MPI_Comm)0x44000001),2687,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"x and y must be different vectors")
;
2688 if (mat->cmap->N != x->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map->N)return PetscError(((MPI_Comm)0x44000001),2688,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec x: global dim %D %D",mat
->cmap->N,x->map->N)
;
2689 if (mat->rmap->N != y->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %D %D",mat->rmap->N,y->map->N)return PetscError(((MPI_Comm)0x44000001),2689,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec y: global dim %D %D",mat
->rmap->N,y->map->N)
;
2690 if (mat->rmap->n != y->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: local dim %D %D",mat->rmap->n,y->map->n)return PetscError(((MPI_Comm)0x44000001),2690,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec y: local dim %D %D",mat->
rmap->n,y->map->n)
;
2691
2692 ierr = PetscLogEventBegin(MAT_MultConstrained,mat,x,y,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MultConstrained].active) ? (*PetscLogPLB)((MAT_MultConstrained
),0,(PetscObject)(mat),(PetscObject)(x),(PetscObject)(y),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2692,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2693 ierr = VecLockReadPush(x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2693,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2694 ierr = (*mat->ops->multconstrained)(mat,x,y);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2694,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2695 ierr = VecLockReadPop(x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2695,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2696 ierr = PetscLogEventEnd(MAT_MultConstrained,mat,x,y,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MultConstrained].active) ? (*PetscLogPLE)((MAT_MultConstrained
),0,(PetscObject)(mat),(PetscObject)(x),(PetscObject)(y),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2696,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2697 ierr = PetscObjectStateIncrease((PetscObject)y)(((PetscObject)y)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2697,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2698 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2699}
2700
2701/*@
2702 MatMultTransposeConstrained - The inner multiplication routine for a
2703 constrained matrix P^T A^T P.
2704
2705 Neighbor-wise Collective on Mat
2706
2707 Input Parameters:
2708+ mat - the matrix
2709- x - the vector to be multiplied
2710
2711 Output Parameters:
2712. y - the result
2713
2714 Notes:
2715 The vectors x and y cannot be the same. I.e., one cannot
2716 call MatMultTransposeConstrained(A,y,y).
2717
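 Example of Usage (a minimal sketch, assuming A is an assembled Mat whose type
 implements a constrained transpose multiply, and x, y are conforming Vecs):
.vb
 MatMultTransposeConstrained(A,x,y);   /* y = (P^T A^T P) x */
.ve
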
2718 Level: beginner
2719
2720.seealso: MatMult(), MatMultTranspose(), MatMultAdd(), MatMultTransposeAdd()
2721@*/
2722PetscErrorCode MatMultTransposeConstrained(Mat mat,Vec x,Vec y)
2723{
2724 PetscErrorCode ierr;
2725
2726 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2726; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2727 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2727,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2727,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2727,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2727,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2728 PetscValidHeaderSpecific(x,VEC_CLASSID,2)do { if (!x) return PetscError(((MPI_Comm)0x44000001),2728,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),2728,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),2728,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),2728,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
2729 PetscValidHeaderSpecific(y,VEC_CLASSID,3)do { if (!y) return PetscError(((MPI_Comm)0x44000001),2729,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(y,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),2729,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(y))->classid != VEC_CLASSID) { if (
((PetscObject)(y))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),2729,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),2729,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
2730 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(((MPI_Comm)0x44000001),2730,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Not for unassembled matrix")
;
2731 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(((MPI_Comm)0x44000001),2731,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Not for factored matrix")
;
2732 if (x == y) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors")return PetscError(((MPI_Comm)0x44000001),2732,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"x and y must be different vectors")
;
2733 if (mat->rmap->N != x->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->rmap->N,x->map->N)return PetscError(((MPI_Comm)0x44000001),2733,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec x: global dim %D %D",mat
->rmap->N,x->map->N)
;
2734 if (mat->cmap->N != y->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %D %D",mat->cmap->N,y->map->N)return PetscError(((MPI_Comm)0x44000001),2734,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec y: global dim %D %D",mat
->cmap->N,y->map->N)
;
2735
2736 ierr = PetscLogEventBegin(MAT_MultConstrained,mat,x,y,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MultConstrained].active) ? (*PetscLogPLB)((MAT_MultConstrained
),0,(PetscObject)(mat),(PetscObject)(x),(PetscObject)(y),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2736,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2737 ierr = (*mat->ops->multtransposeconstrained)(mat,x,y);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2737,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2738 ierr = PetscLogEventEnd(MAT_MultConstrained,mat,x,y,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MultConstrained].active) ? (*PetscLogPLE)((MAT_MultConstrained
),0,(PetscObject)(mat),(PetscObject)(x),(PetscObject)(y),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2738,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2739 ierr = PetscObjectStateIncrease((PetscObject)y)(((PetscObject)y)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2739,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2740 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2741}
2742
2743/*@C
2744 MatGetFactorType - gets the type of factorization of the matrix
2745
2746 Not Collective
2747
2748 Input Parameters:
2749. mat - the matrix
2750
2751 Output Parameters:
2752. t - the type, one of MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC, MAT_FACTOR_ILUDT
2753
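 Example of Usage (a minimal sketch; F is assumed to be a Mat obtained from
 MatGetFactor() or returned by a factorization routine):
.vb
 MatFactorType type;
 MatGetFactorType(F,&type);
 if (type == MAT_FACTOR_NONE) { /* F is an ordinary, unfactored matrix */ }
.ve
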
2754 Level: intermediate
2755
2756.seealso: MatFactorType, MatGetFactor(), MatSetFactorType()
2757@*/
2758PetscErrorCode MatGetFactorType(Mat mat,MatFactorType *t)
2759{
2760 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2760; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2761 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2761,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2761,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2761,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2761,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2762 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2762,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2763 PetscValidPointer(t,2)do { if (!t) return PetscError(((MPI_Comm)0x44000001),2763,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",2); if (!PetscCheckPointer(t,
PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),2763,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",2); } while (0)
;
2764 *t = mat->factortype;
2765 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2766}
2767
2768/*@C
2769 MatSetFactorType - sets the type of factorization the matrix represents
2770
2771 Logically Collective on Mat
2772
2773 Input Parameters:
2774+ mat - the matrix
2775- t - the type, one of MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC, MAT_FACTOR_ILUDT
2776
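 Example of Usage:
 (a minimal sketch; F is assumed to be a matrix, e.g. one built by an external package, that already holds LU factors)
.vb
    /* mark F so PETSc treats it as an LU-factored matrix */
    MatSetFactorType(F,MAT_FACTOR_LU);
.ve
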
2777 Level: intermediate
2778
2779.seealso: MatFactorType, MatGetFactor(), MatGetFactorType()
2780@*/
2781PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
2782{
2783 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2783; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2784 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2784,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2784,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2784,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2784,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2785 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2785,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2786 mat->factortype = t;
2787 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2788}
2789
2790/* ------------------------------------------------------------*/
2791/*@C
2792 MatGetInfo - Returns information about matrix storage (number of
2793 nonzeros, memory, etc.).
2794
2795 Collective on Mat if MAT_GLOBAL_MAX or MAT_GLOBAL_SUM is used as the flag
2796
2797 Input Parameters:
2798+ mat - the matrix
2799- flag - flag indicating the type of parameters to be returned
2800 (MAT_LOCAL - local matrix, MAT_GLOBAL_MAX - maximum over all processors,
2801 MAT_GLOBAL_SUM - sum over all processors)
2802
2803 Output Parameter:
2804. info - matrix information context
2805
2806 Notes:
2807 The MatInfo context contains a variety of matrix data, including
2808 number of nonzeros allocated and used, number of mallocs during
2809 matrix assembly, etc. Additional information for factored matrices
2810 is provided (such as the fill ratio, number of mallocs during
2811 factorization, etc.). Much of this info is printed to PETSC_STDOUT
2812 when using the runtime options
2813$ -info -mat_view ::ascii_info
2814
2815 Example for C/C++ Users:
2816 See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
2817 data within the MatInfo context. For example,
2818.vb
2819 MatInfo info;
2820 Mat A;
2821 double mal, nz_a, nz_u;
2822
2823 MatGetInfo(A,MAT_LOCAL,&info);
2824 mal = info.mallocs;
2825 nz_a = info.nz_allocated;
2826.ve
2827
2828 Example for Fortran Users:
2829 Fortran users should declare info as a double precision
2830 array of dimension MAT_INFO_SIZE, and then extract the parameters
2831 of interest. See the file ${PETSC_DIR}/include/petsc/finclude/petscmat.h
2832 for a complete list of parameter names.
2833.vb
2834 double precision info(MAT_INFO_SIZE)
2835 double precision mal, nz_a
2836 Mat A
2837 integer ierr
2838
2839 call MatGetInfo(A,MAT_LOCAL,info,ierr)
2840 mal = info(MAT_INFO_MALLOCS)
2841 nz_a = info(MAT_INFO_NZ_ALLOCATED)
2842.ve
2843
2844 Level: intermediate
2845
2846 Developer Note: fortran interface is not autogenerated as the f90
2847 interface definition cannot be generated correctly [due to MatInfo]
2848
2849.seealso: MatStashGetInfo()
2850
2851@*/
2852PetscErrorCode MatGetInfo(Mat mat,MatInfoType flag,MatInfo *info)
2853{
2854 PetscErrorCode ierr;
2855
2856 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2856; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2857 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2857,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2857,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2857,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2857,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2858 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2858,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2859 PetscValidPointer(info,3)do { if (!info) return PetscError(((MPI_Comm)0x44000001),2859
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(info,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),2859,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",3);
} while (0)
;
2860 if (!mat->ops->getinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(((MPI_Comm)0x44000001),2860,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,56,PETSC_ERROR_INITIAL,"Mat type %s",((PetscObject)mat)->
type_name)
;
2861 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),2861,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
2862 ierr = (*mat->ops->getinfo)(mat,flag,info);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2862,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2863 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2864}
2865
2866/*
2867 This is used by external packages where it is not easy to get the info from the actual
2868 matrix factorization.
2869*/
2870PetscErrorCode MatGetInfo_External(Mat A,MatInfoType flag,MatInfo *info)
2871{
2872 PetscErrorCode ierr;
2873
2874 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2874; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2875 ierr = PetscMemzero(info,sizeof(MatInfo));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2875,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2876 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2877}
2878
2879/* ----------------------------------------------------------*/
2880
2881/*@C
2882 MatLUFactor - Performs in-place LU factorization of a matrix.
2883
2884 Collective on Mat
2885
2886 Input Parameters:
2887+ mat - the matrix
2888. row - row permutation
2889. col - column permutation
2890- info - options for factorization, includes
2891$ fill - expected fill as ratio of original fill.
2892$ dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
2893$ Run with the option -info to determine an optimal value to use
2894
2895 Notes:
2896 Most users should employ the simplified KSP interface for linear solvers
2897 instead of working directly with matrix algebra routines such as this.
2898 See, e.g., KSPCreate().
2899
2900 This changes the state of the matrix to a factored matrix; it cannot be used
2901 for example with MatSetValues() unless one first calls MatSetUnfactored().
2902
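 Example of Usage:
 (a minimal sketch; assumes A is a square, assembled matrix and b, x are conforming vectors)
.vb
    IS            rowperm,colperm;
    MatFactorInfo info;

    MatFactorInfoInitialize(&info);
    MatGetOrdering(A,MATORDERINGND,&rowperm,&colperm);
    MatLUFactor(A,rowperm,colperm,&info);  /* A is overwritten by its LU factors */
    MatSolve(A,b,x);                       /* solve A x = b with the in-place factors */
    ISDestroy(&rowperm);
    ISDestroy(&colperm);
.ve
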
2903 Level: developer
2904
2905.seealso: MatLUFactorSymbolic(), MatLUFactorNumeric(), MatCholeskyFactor(),
2906 MatGetOrdering(), MatSetUnfactored(), MatFactorInfo, MatGetFactor()
2907
2908 Developer Note: fortran interface is not autogenerated as the f90
2909 interface definition cannot be generated correctly [due to MatFactorInfo]
2910
2911@*/
2912PetscErrorCode MatLUFactor(Mat mat,IS row,IS col,const MatFactorInfo *info)
2913{
2914 PetscErrorCode ierr;
2915 MatFactorInfo tinfo;
2916
2917 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2917; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2918 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2918,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2918,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2918,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2918,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2919 if (row) PetscValidHeaderSpecific(row,IS_CLASSID,2)do { if (!row) return PetscError(((MPI_Comm)0x44000001),2919,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",2); if (
!PetscCheckPointer(row,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2919,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(row))->classid != IS_CLASSID) { if
(((PetscObject)(row))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2919,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),2919,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
2920 if (col) PetscValidHeaderSpecific(col,IS_CLASSID,3)do { if (!col) return PetscError(((MPI_Comm)0x44000001),2920,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(col,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2920,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(col))->classid != IS_CLASSID) { if
(((PetscObject)(col))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2920,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),2920,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
2921 if (info) PetscValidPointer(info,4)do { if (!info) return PetscError(((MPI_Comm)0x44000001),2921
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",4); if
(!PetscCheckPointer(info,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),2921,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",4);
} while (0)
;
2922 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2922,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2923 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),2923,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
2924 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),2924,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
2925 if (!mat->ops->lufactor) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),2925,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
2926 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),2926,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
2927 if (!info) {
2928 ierr = MatFactorInfoInitialize(&tinfo);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2928,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2929 info = &tinfo;
2930 }
2931
2932 ierr = PetscLogEventBegin(MAT_LUFactor,mat,row,col,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_LUFactor].active) ? (*PetscLogPLB)((MAT_LUFactor),0,(PetscObject
)(mat),(PetscObject)(row),(PetscObject)(col),(PetscObject)(0)
) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2932,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2933 ierr = (*mat->ops->lufactor)(mat,row,col,info);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2933,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2934 ierr = PetscLogEventEnd(MAT_LUFactor,mat,row,col,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_LUFactor].active) ? (*PetscLogPLE)((MAT_LUFactor),0,(PetscObject
)(mat),(PetscObject)(row),(PetscObject)(col),(PetscObject)(0)
) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2934,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2935 ierr = PetscObjectStateIncrease((PetscObject)mat)(((PetscObject)mat)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2935,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2936 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2937}
2938
2939/*@C
2940 MatILUFactor - Performs in-place ILU factorization of a matrix.
2941
2942 Collective on Mat
2943
2944 Input Parameters:
2945+ mat - the matrix
2946. row - row permutation
2947. col - column permutation
2948- info - structure containing
2949$ levels - number of levels of fill.
2950$ fill - expected fill as ratio of original fill.
2951$ diagonal_fill - 1 or 0, to force allocation of diagonal entries (improves robustness for matrices
2952 missing diagonal entries)
2953
2954 Notes:
2955 Probably really in-place only when level of fill is zero, otherwise allocates
2956 new space to store factored matrix and deletes previous memory.
2957
2958 Most users should employ the simplified KSP interface for linear solvers
2959 instead of working directly with matrix algebra routines such as this.
2960 See, e.g., KSPCreate().
2961
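 Example of Usage:
 (a minimal sketch; assumes A is a square, assembled matrix)
.vb
    IS            rowperm,colperm;
    MatFactorInfo info;

    MatFactorInfoInitialize(&info);
    info.levels = 0;                        /* ILU(0), the truly in-place case */
    MatGetOrdering(A,MATORDERINGNATURAL,&rowperm,&colperm);
    MatILUFactor(A,rowperm,colperm,&info);
.ve
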
2962 Level: developer
2963
2964.seealso: MatILUFactorSymbolic(), MatLUFactorNumeric(), MatCholeskyFactor(), MatFactorInfo
2965
2966 Developer Note: fortran interface is not autogenerated as the f90
2967 interface definition cannot be generated correctly [due to MatFactorInfo]
2968
2969@*/
2970PetscErrorCode MatILUFactor(Mat mat,IS row,IS col,const MatFactorInfo *info)
2971{
2972 PetscErrorCode ierr;
2973
2974 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2974; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2975 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2975,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2975,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2975,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2975,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2976 if (row) PetscValidHeaderSpecific(row,IS_CLASSID,2)do { if (!row) return PetscError(((MPI_Comm)0x44000001),2976,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",2); if (
!PetscCheckPointer(row,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2976,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(row))->classid != IS_CLASSID) { if
(((PetscObject)(row))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2976,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),2976,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
2977 if (col) PetscValidHeaderSpecific(col,IS_CLASSID,3)do { if (!col) return PetscError(((MPI_Comm)0x44000001),2977,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(col,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2977,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(col))->classid != IS_CLASSID) { if
(((PetscObject)(col))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2977,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),2977,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
2978 PetscValidPointer(info,4)do { if (!info) return PetscError(((MPI_Comm)0x44000001),2978
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",4); if
(!PetscCheckPointer(info,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),2978,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",4);
} while (0)
;
2979 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2979,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2980 if (mat->rmap->N != mat->cmap->N) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONG,"matrix must be square")return PetscError(PetscObjectComm((PetscObject)mat),2980,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"matrix must be square")
;
2981 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),2981,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
2982 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),2982,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
2983 if (!mat->ops->ilufactor) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),2983,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
2984 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),2984,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
2985
2986 ierr = PetscLogEventBegin(MAT_ILUFactor,mat,row,col,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_ILUFactor].active) ? (*PetscLogPLB)((MAT_ILUFactor),0,(PetscObject
)(mat),(PetscObject)(row),(PetscObject)(col),(PetscObject)(0)
) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2986,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2987 ierr = (*mat->ops->ilufactor)(mat,row,col,info);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2987,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2988 ierr = PetscLogEventEnd(MAT_ILUFactor,mat,row,col,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_ILUFactor].active) ? (*PetscLogPLE)((MAT_ILUFactor),0,(PetscObject
)(mat),(PetscObject)(row),(PetscObject)(col),(PetscObject)(0)
) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2988,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2989 ierr = PetscObjectStateIncrease((PetscObject)mat)(((PetscObject)mat)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2989,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2990 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2991}
2992
2993/*@C
2994 MatLUFactorSymbolic - Performs symbolic LU factorization of a matrix.
2995 Call this routine before calling MatLUFactorNumeric().
2996
2997 Collective on Mat
2998
2999 Input Parameters:
3000+ fact - the factor matrix obtained with MatGetFactor()
3001. mat - the matrix
3002. row, col - row and column permutations
3003- info - options for factorization, includes
3004$ fill - expected fill as ratio of original fill.
3005$ dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3006$ Run with the option -info to determine an optimal value to use
3007
3008
3009 Notes:
3010 See Users-Manual: ch_mat for additional information about choosing the fill factor for better efficiency.
3011
3012 Most users should employ the simplified KSP interface for linear solvers
3013 instead of working directly with matrix algebra routines such as this.
3014 See, e.g., KSPCreate().
3015
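 Example of Usage:
 (a minimal sketch of the full symbolic/numeric sequence; assumes A is an assembled AIJ matrix and b, x are conforming vectors)
.vb
    Mat           F;
    IS            rowperm,colperm;
    MatFactorInfo info;

    MatFactorInfoInitialize(&info);
    MatGetOrdering(A,MATORDERINGND,&rowperm,&colperm);
    MatGetFactor(A,MATSOLVERPETSC,MAT_FACTOR_LU,&F);
    MatLUFactorSymbolic(F,A,rowperm,colperm,&info);
    MatLUFactorNumeric(F,A,&info);
    MatSolve(F,b,x);
.ve
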
3016 Level: developer
3017
3018.seealso: MatLUFactor(), MatLUFactorNumeric(), MatCholeskyFactor(), MatFactorInfo, MatFactorInfoInitialize()
3019
3020 Developer Note: fortran interface is not autogenerated as the f90
3021 interface definition cannot be generated correctly [due to MatFactorInfo]
3022
3023@*/
3024PetscErrorCode MatLUFactorSymbolic(Mat fact,Mat mat,IS row,IS col,const MatFactorInfo *info)
3025{
3026 PetscErrorCode ierr;
3027
3028 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 3028; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
3029 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),3029,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),3029,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),3029,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),3029,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
3030 if (row) PetscValidHeaderSpecific(row,IS_CLASSID,2)do { if (!row) return PetscError(((MPI_Comm)0x44000001),3030,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",2); if (
!PetscCheckPointer(row,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),3030,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(row))->classid != IS_CLASSID) { if
(((PetscObject)(row))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),3030,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),3030,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
3031 if (col) PetscValidHeaderSpecific(col,IS_CLASSID,3)do { if (!col) return PetscError(((MPI_Comm)0x44000001),3031,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(col,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),3031,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(col))->classid != IS_CLASSID) { if
(((PetscObject)(col))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),3031,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),3031,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
3032 if (info) PetscValidPointer(info,4)do { if (!info) return PetscError(((MPI_Comm)0x44000001),3032
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",4); if
(!PetscCheckPointer(info,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),3032,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",4);
} while (0)
;
3033 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),3033,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
3034 PetscValidPointer(fact,5)do { if (!fact) return PetscError(((MPI_Comm)0x44000001),3034
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",5); if
(!PetscCheckPointer(fact,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),3034,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",5);
} while (0)
;
3035 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),3035,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
3036 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),3036,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
3037 if (!(fact)->ops->lufactorsymbolic) {
3038 MatSolverType spackage;
3039 ierr = MatFactorGetSolverType(fact,&spackage);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3039,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3040 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s symbolic LU using solver package %s",((PetscObject)mat)->type_name,spackage)return PetscError(PetscObjectComm((PetscObject)mat),3040,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Matrix type %s symbolic LU using solver package %s",((PetscObject
)mat)->type_name,spackage)
;
3041 }
3042 MatCheckPreallocated(mat,2)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),3042,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(2),"mat",__func__); } while (0)
;
3043
3044 ierr = PetscLogEventBegin(MAT_LUFactorSymbolic,mat,row,col,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_LUFactorSymbolic].active) ? (*PetscLogPLB)((MAT_LUFactorSymbolic
),0,(PetscObject)(mat),(PetscObject)(row),(PetscObject)(col),
(PetscObject)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3044,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3045 ierr = (fact->ops->lufactorsymbolic)(fact,mat,row,col,info);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3045,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3046 ierr = PetscLogEventEnd(MAT_LUFactorSymbolic,mat,row,col,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_LUFactorSymbolic].active) ? (*PetscLogPLE)((MAT_LUFactorSymbolic
),0,(PetscObject)(mat),(PetscObject)(row),(PetscObject)(col),
(PetscObject)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3046,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3047 ierr = PetscObjectStateIncrease((PetscObject)fact)(((PetscObject)fact)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3047,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3048 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3049}
3050
3051/*@C
3052 MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3053 Call this routine after first calling MatLUFactorSymbolic().
3054
3055 Collective on Mat
3056
3057 Input Parameters:
3058+ fact - the factor matrix obtained with MatGetFactor()
3059. mat - the matrix
3060- info - options for factorization
3061
3062 Notes:
3063 See MatLUFactor() for in-place factorization. See
3064 MatCholeskyFactorNumeric() for the symmetric, positive definite case.
3065
3066 Most users should employ the simplified KSP interface for linear solvers
3067 instead of working directly with matrix algebra routines such as this.
3068 See, e.g., KSPCreate().
3069
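 Example of Usage:
 (a minimal sketch; assumes F was obtained with MatGetFactor() and set up with MatLUFactorSymbolic() on A, with info the same MatFactorInfo used there)
.vb
    /* refactor numerically after the entries of A change but its nonzero pattern does not */
    MatLUFactorNumeric(F,A,&info);
    MatSolve(F,b,x);
.ve
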
3070 Level: developer
3071
3072.seealso: MatLUFactorSymbolic(), MatLUFactor(), MatCholeskyFactor()
3073
3074 Developer Note: fortran interface is not autogenerated as the f90
3075 interface definition cannot be generated correctly [due to MatFactorInfo]
3076
3077@*/
3078PetscErrorCode MatLUFactorNumeric(Mat fact,Mat mat,const MatFactorInfo *info)
3079{
3080 PetscErrorCode ierr;
3081
3082 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 3082; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
3083 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),3083,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),3083,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),3083,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),3083,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
3084 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),3084,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
3085 PetscValidPointer(fact,2)do { if (!fact) return PetscError(((MPI_Comm)0x44000001),3085
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",2); if
(!PetscCheckPointer(fact,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),3085,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",2);
} while (0)
;
3086 PetscValidHeaderSpecific(fact,MAT_CLASSID,2)do { if (!fact) return PetscError(((MPI_Comm)0x44000001),3086
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",2); if (
!PetscCheckPointer(fact,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),3086,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(fact))->classid != MAT_CLASSID) { if
(((PetscObject)(fact))->classid == -1) return PetscError(
((MPI_Comm)0x44000001),3086,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),3086,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
3087 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),3087,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
3088 if (mat->rmap->N != (fact)->rmap->N || mat->cmap->N != (fact)->cmap->N) SETERRQ4(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Mat fact: global dimensions are different %D should = %D %D should = %D",mat->rmap->N,(fact)->rmap->N,mat->cmap->N,(fact)->cmap->N)return PetscError(PetscObjectComm((PetscObject)mat),3088,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Mat fact: global dimensions are different %D should = %D %D should = %D"
,mat->rmap->N,(fact)->rmap->N,mat->cmap->N,
(fact)->cmap->N)
;
3089
3090 if (!(fact)->ops->lufactornumeric) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s numeric LU",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),3090,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s numeric LU",((PetscObject)mat)->type_name)
;
3091 MatCheckPreallocated(mat,2)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),3091,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(2),"mat",__func__); } while (0)
;
3092 ierr = PetscLogEventBegin(MAT_LUFactorNumeric,mat,fact,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_LUFactorNumeric].active) ? (*PetscLogPLB)((MAT_LUFactorNumeric
),0,(PetscObject)(mat),(PetscObject)(fact),(PetscObject)(0),(
PetscObject)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3092,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3093 ierr = (fact->ops->lufactornumeric)(fact,mat,info);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3093,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3094 ierr = PetscLogEventEnd(MAT_LUFactorNumeric,mat,fact,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_LUFactorNumeric].active) ? (*PetscLogPLE)((MAT_LUFactorNumeric
),0,(PetscObject)(mat),(PetscObject)(fact),(PetscObject)(0),(
PetscObject)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3094,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3095 ierr = MatViewFromOptions(fact,NULL((void*)0),"-mat_factor_view");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3095,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3096 ierr = PetscObjectStateIncrease((PetscObject)fact)(((PetscObject)fact)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3096,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3097 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3098}
3099
3100/*@C
3101 MatCholeskyFactor - Performs in-place Cholesky factorization of a
3102 symmetric matrix.
3103
3104 Collective on Mat
3105
3106 Input Parameters:
3107+ mat - the matrix
3108. perm - row and column permutations
3109- info - options for factorization (includes fill, the expected fill as ratio of original fill)
3110
3111 Notes:
3112 See MatLUFactor() for the nonsymmetric case. See also
3113 MatCholeskyFactorSymbolic(), and MatCholeskyFactorNumeric().
3114
3115 Most users should employ the simplified KSP interface for linear solvers
3116 instead of working directly with matrix algebra routines such as this.
3117 See, e.g., KSPCreate().
3118
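 Example of Usage:
 (a minimal sketch; assumes A is a symmetric, assembled matrix and b, x are conforming vectors)
.vb
    IS            perm,colperm;
    MatFactorInfo info;

    MatFactorInfoInitialize(&info);
    MatGetOrdering(A,MATORDERINGNATURAL,&perm,&colperm);
    MatCholeskyFactor(A,perm,&info);   /* A is overwritten by its Cholesky factor */
    MatSolve(A,b,x);
.ve
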
3119 Level: developer
3120
3121.seealso: MatLUFactor(), MatCholeskyFactorSymbolic(), MatCholeskyFactorNumeric()
3122 MatGetOrdering()
3123
3124 Developer Note: fortran interface is not autogenerated as the f90
3125 interface definition cannot be generated correctly [due to MatFactorInfo]
3126
3127@*/
3128PetscErrorCode MatCholeskyFactor(Mat mat,IS perm,const MatFactorInfo *info)
3129{
3130 PetscErrorCode ierr;
3131
3132 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 3132; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
3133 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),3133,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),3133,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),3133,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),3133,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
3134 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),3134,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
3135 if (perm) PetscValidHeaderSpecific(perm,IS_CLASSID,2)do { if (!perm) return PetscError(((MPI_Comm)0x44000001),3135
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",2); if (
!PetscCheckPointer(perm,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),3135,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(perm))->classid != IS_CLASSID) { if
(((PetscObject)(perm))->classid == -1) return PetscError(
((MPI_Comm)0x44000001),3135,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),3135,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
3136 if (info) PetscValidPointer(info,3)do { if (!info) return PetscError(((MPI_Comm)0x44000001),3136
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(info,PETSC_CHAR)) return PetscError(((MPI_Comm
)0x44000001),3136,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",3);
} while (0)
;
3137 if (mat->rmap->N != mat->cmap->N) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONG,"Matrix must be square")return PetscError(PetscObjectComm((PetscObject)mat),3137,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Matrix must be square")
;
3138 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),3138,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
3139 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),3139,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
3140 if (!mat->ops->choleskyfactor) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"In-place factorization for Mat type %s is not supported, try out-of-place factorization. See MatCholeskyFactorSymbolic/Numeric",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),3140,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"In-place factorization for Mat type %s is not supported, try out-of-place factorization. See MatCholeskyFactorSymbolic/Numeric"
,((PetscObject)mat)->type_name)
;
3141 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),3141,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
3142
3143 ierr = PetscLogEventBegin(MAT_CholeskyFactor,mat,perm,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_CholeskyFactor].active) ? (*PetscLogPLB)((MAT_CholeskyFactor
),0,(PetscObject)(mat),(PetscObject)(perm),(PetscObject)(0),(
PetscObject)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3143,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3144 ierr = (*mat->ops->choleskyfactor)(mat,perm,info);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3144,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3145 ierr = PetscLogEventEnd(MAT_CholeskyFactor,mat,perm,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_CholeskyFactor].active) ? (*PetscLogPLE)((MAT_CholeskyFactor
),0,(PetscObject)(mat),(PetscObject)(perm),(PetscObject)(0),(
PetscObject)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3145,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3146 ierr = PetscObjectStateIncrease((PetscObject)mat)(((PetscObject)mat)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3146,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3147 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3148}
3149
3150/*@C
3151 MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3152 of a symmetric matrix.
3153
3154 Collective on Mat
3155
3156 Input Parameters:
3157+ fact - the factor matrix obtained with MatGetFactor()
3158. mat - the matrix
3159. perm - row and column permutations
3160- info - options for factorization, includes
3161$ fill - expected fill as ratio of original fill.
3162$ dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3163$ Run with the option -info to determine an optimal value to use
3164
3165 Notes:
3166 See MatLUFactorSymbolic() for the nonsymmetric case. See also
3167 MatCholeskyFactor() and MatCholeskyFactorNumeric().
3168
3169 Most users should employ the simplified KSP interface for linear solvers
3170 instead of working directly with matrix algebra routines such as this.
3171 See, e.g., KSPCreate().
3172
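 Example of Usage:
 (a minimal sketch of the symbolic/numeric sequence; assumes A is a symmetric, assembled AIJ matrix)
.vb
    Mat           F;
    IS            perm,colperm;
    MatFactorInfo info;

    MatFactorInfoInitialize(&info);
    MatGetOrdering(A,MATORDERINGNATURAL,&perm,&colperm);
    MatGetFactor(A,MATSOLVERPETSC,MAT_FACTOR_CHOLESKY,&F);
    MatCholeskyFactorSymbolic(F,A,perm,&info);
    MatCholeskyFactorNumeric(F,A,&info);
.ve
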
3173 Level: developer
3174
3175.seealso: MatLUFactorSymbolic(), MatCholeskyFactor(), MatCholeskyFactorNumeric()
3176 MatGetOrdering()
3177
3178 Developer Note: fortran interface is not autogenerated as the f90
3179 interface definition cannot be generated correctly [due to MatFactorInfo]
3180
3181@*/
3182PetscErrorCode MatCholeskyFactorSymbolic(Mat fact,Mat mat,IS perm,const MatFactorInfo *info)
3183{
3184 PetscErrorCode ierr;
3185
3186 PetscFunctionBegin;
3187 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3188 PetscValidType(mat,1);
3189 if (perm) PetscValidHeaderSpecific(perm,IS_CLASSID,2);
3190 if (info) PetscValidPointer(info,3);
3191 PetscValidPointer(fact,4);
3192 if (mat->rmap->N != mat->cmap->N) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONG,"Matrix must be square");
3193 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
3194 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3195 if (!(fact)->ops->choleskyfactorsymbolic) {
3196 MatSolverType spackage;
3197 ierr = MatFactorGetSolverType(fact,&spackage);CHKERRQ(ierr);
3198 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s symbolic factor Cholesky using solver package %s",((PetscObject)mat)->type_name,spackage);
3199 }
3200 MatCheckPreallocated(mat,2);
3201
3202 ierr = PetscLogEventBegin(MAT_CholeskyFactorSymbolic,mat,perm,0,0);CHKERRQ(ierr);
3203 ierr = (fact->ops->choleskyfactorsymbolic)(fact,mat,perm,info);CHKERRQ(ierr);
3204 ierr = PetscLogEventEnd(MAT_CholeskyFactorSymbolic,mat,perm,0,0);CHKERRQ(ierr);
3205 ierr = PetscObjectStateIncrease((PetscObject)fact);CHKERRQ(ierr);
3206 PetscFunctionReturn(0);
3207}
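/* Editor's note: an illustrative call sequence for the symbolic/numeric
   Cholesky workflow around MatCholeskyFactorSymbolic() above -- a minimal
   sketch, assuming an assembled symmetric matrix A and vectors b,x; the
   names F, perm, cperm, and info are hypothetical:

     Mat            F;
     IS             perm,cperm;
     MatFactorInfo  info;

     ierr = MatGetFactor(A,MATSOLVERPETSC,MAT_FACTOR_CHOLESKY,&F);CHKERRQ(ierr);
     ierr = MatGetOrdering(A,MATORDERINGNATURAL,&perm,&cperm);CHKERRQ(ierr);
     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
     info.fill = 2.0;                                   (expected fill ratio)
     ierr = MatCholeskyFactorSymbolic(F,A,perm,&info);CHKERRQ(ierr);
     ierr = MatCholeskyFactorNumeric(F,A,&info);CHKERRQ(ierr);
     ierr = MatSolve(F,b,x);CHKERRQ(ierr);
     ierr = ISDestroy(&perm);CHKERRQ(ierr);
     ierr = ISDestroy(&cperm);CHKERRQ(ierr);
     ierr = MatDestroy(&F);CHKERRQ(ierr);
*/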
3208
3209/*@C
3210 MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3211 of a symmetric matrix. Call this routine after first calling
3212 MatCholeskyFactorSymbolic().
3213
3214 Collective on Mat
3215
3216 Input Parameters:
3217+ fact - the factor matrix obtained with MatGetFactor(), which on entry
3218       holds the symbolic factor of mat
3219. mat - the initial matrix
3220- info - options for factorization
3221
3222
3223 Notes:
3224 Most users should employ the simplified KSP interface for linear solvers
3225 instead of working directly with matrix algebra routines such as this.
3226 See, e.g., KSPCreate().
3227
3228 Level: developer
3229
3230.seealso: MatCholeskyFactorSymbolic(), MatCholeskyFactor(), MatLUFactorNumeric()
3231
3232 Developer Note: the Fortran interface is not autogenerated as the f90
3233 interface definition cannot be generated correctly [due to MatFactorInfo]
3234
3235@*/
3236PetscErrorCode MatCholeskyFactorNumeric(Mat fact,Mat mat,const MatFactorInfo *info)
3237{
3238 PetscErrorCode ierr;
3239
3240 PetscFunctionBegin;
3241 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3242 PetscValidType(mat,1);
3243 PetscValidPointer(fact,2);
3244 PetscValidHeaderSpecific(fact,MAT_CLASSID,2);
3245 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
3246 if (!(fact)->ops->choleskyfactornumeric) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s numeric factor Cholesky",((PetscObject)mat)->type_name);
3247 if (mat->rmap->N != (fact)->rmap->N || mat->cmap->N != (fact)->cmap->N) SETERRQ4(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Mat fact: global dim %D should = %D %D should = %D",mat->rmap->N,(fact)->rmap->N,mat->cmap->N,(fact)->cmap->N);
3248 MatCheckPreallocated(mat,2);
3249
3250 ierr = PetscLogEventBegin(MAT_CholeskyFactorNumeric,mat,fact,0,0);CHKERRQ(ierr);
3251 ierr = (fact->ops->choleskyfactornumeric)(fact,mat,info);CHKERRQ(ierr);
3252 ierr = PetscLogEventEnd(MAT_CholeskyFactorNumeric,mat,fact,0,0);CHKERRQ(ierr);
3253 ierr = MatViewFromOptions(fact,NULL,"-mat_factor_view");CHKERRQ(ierr);
3254 ierr = PetscObjectStateIncrease((PetscObject)fact);CHKERRQ(ierr);
3255 PetscFunctionReturn(0);
3256}
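/* Editor's note: once the symbolic factor exists, the numeric phase can be
   repeated whenever the values of mat change but its nonzero pattern does
   not, e.g. inside a time-stepping loop -- a sketch with hypothetical names
   A, F, info, b, x:

     ... update the entries of A with MatSetValues() ...
     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatCholeskyFactorNumeric(F,A,&info);CHKERRQ(ierr);
     ierr = MatSolve(F,b,x);CHKERRQ(ierr);
*/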
3257
3258/* ----------------------------------------------------------------*/
3259/*@
3260 MatSolve - Solves A x = b, given a factored matrix.
3261
3262 Neighbor-wise Collective on Mat
3263
3264 Input Parameters:
3265+ mat - the factored matrix
3266- b - the right-hand-side vector
3267
3268 Output Parameter:
3269. x - the result vector
3270
3271 Notes:
3272 The vectors b and x cannot be the same. I.e., one cannot
3273 call MatSolve(A,x,x).
3274
3275
3276 Most users should employ the simplified KSP interface for linear solvers
3277 instead of working directly with matrix algebra routines such as this.
3278 See, e.g., KSPCreate().
3279
3280 Level: developer
3281
3282.seealso: MatSolveAdd(), MatSolveTranspose(), MatSolveTransposeAdd()
3283@*/
3284PetscErrorCode MatSolve(Mat mat,Vec b,Vec x)
3285{
3286 PetscErrorCode ierr;
3287
3288 PetscFunctionBegin;
3289 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3290 PetscValidType(mat,1);
3291 PetscValidHeaderSpecific(b,VEC_CLASSID,2);
3292 PetscValidHeaderSpecific(x,VEC_CLASSID,3);
3293 PetscCheckSameComm(mat,1,b,2);
3294 PetscCheckSameComm(mat,1,x,3);
3295 if (x == b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors");
3296 if (mat->cmap->N != x->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map->N);
3297 if (mat->rmap->N != b->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map->N);
3298 if (mat->rmap->n != b->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: local dim %D %D",mat->rmap->n,b->map->n);
3299 if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0);
3300 if (!mat->ops->solve) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
3301 MatCheckPreallocated(mat,1);
3302
3303 ierr = PetscLogEventBegin(MAT_Solve,mat,b,x,0);CHKERRQ(ierr);
3304 if (mat->factorerrortype) {
3305 ierr = PetscInfo1(mat,"MatFactorError %D\n",mat->factorerrortype);CHKERRQ(ierr);
3306 ierr = VecSetInf(x);CHKERRQ(ierr);
3307 } else {
3308 if (!mat->ops->solve) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
3309 ierr = (*mat->ops->solve)(mat,b,x);CHKERRQ(ierr);
3310 }
3311 ierr = PetscLogEventEnd(MAT_Solve,mat,b,x,0);CHKERRQ(ierr);
3312 ierr = PetscObjectStateIncrease((PetscObject)x);CHKERRQ(ierr);
3313 PetscFunctionReturn(0);
3314}
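/* Editor's note: because MatSolve() requires b != x, an "in place" solve has
   to go through a work vector -- a sketch, hypothetical names F, b, y:

     Vec y;
     ierr = VecDuplicate(b,&y);CHKERRQ(ierr);
     ierr = MatSolve(F,b,y);CHKERRQ(ierr);
     ierr = VecCopy(y,b);CHKERRQ(ierr);
     ierr = VecDestroy(&y);CHKERRQ(ierr);
*/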
3315
3316static PetscErrorCode MatMatSolve_Basic(Mat A,Mat B,Mat X, PetscBool trans)
3317{
3318 PetscErrorCode ierr;
3319 Vec b,x;
3320 PetscInt m,N,i;
3321 PetscScalar *bb,*xx;
3322 PetscBool flg;
3323
3324 PetscFunctionBegin;
3325 ierr = PetscObjectTypeCompareAny((PetscObject)B,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
3326 if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
3327 ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
3328 if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");
3329
3330 ierr = MatDenseGetArray(B,&bb);CHKERRQ(ierr);
3331 ierr = MatDenseGetArray(X,&xx);CHKERRQ(ierr);
3332 ierr = MatGetLocalSize(B,&m,NULL);CHKERRQ(ierr); /* number local rows */
3333 ierr = MatGetSize(B,NULL,&N);CHKERRQ(ierr); /* total columns in dense matrix */
3334 ierr = MatCreateVecs(A,&x,&b);CHKERRQ(ierr);
3335 for (i=0; i<N; i++) {
3336 ierr = VecPlaceArray(b,bb + i*m);CHKERRQ(ierr);
3337 ierr = VecPlaceArray(x,xx + i*m);CHKERRQ(ierr);
3338 if (trans) {
3339 ierr = MatSolveTranspose(A,b,x);CHKERRQ(ierr);
3340 } else {
3341 ierr = MatSolve(A,b,x);CHKERRQ(ierr);
3342 }
3343 ierr = VecResetArray(x);CHKERRQ(ierr);
3344 ierr = VecResetArray(b);CHKERRQ(ierr);
3345 }
3346 ierr = VecDestroy(&b);CHKERRQ(ierr);
3347 ierr = VecDestroy(&x);CHKERRQ(ierr);
3348 ierr = MatDenseRestoreArray(B,&bb);CHKERRQ(ierr);
3349 ierr = MatDenseRestoreArray(X,&xx);CHKERRQ(ierr);
3350 PetscFunctionReturn(0);
3351}
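/* Editor's note: MatMatSolve_Basic() above exploits the column-major storage
   of MATDENSE: column i of the local array begins at offset i*m, so
   VecPlaceArray()/VecResetArray() wrap each column in a work vector without
   copying. The same zero-copy pattern applies to any per-column operation --
   a sketch, hypothetical names D, w, vals, ncols, m:

     ierr = MatDenseGetArray(D,&vals);CHKERRQ(ierr);
     for (j=0; j<ncols; j++) {
       ierr = VecPlaceArray(w,vals + j*m);CHKERRQ(ierr);
       ierr = VecNorm(w,NORM_2,&nrm);CHKERRQ(ierr);
       ierr = VecResetArray(w);CHKERRQ(ierr);
     }
     ierr = MatDenseRestoreArray(D,&vals);CHKERRQ(ierr);
*/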
3352
3353/*@
3354 MatMatSolve - Solves A X = B, given a factored matrix.
3355
3356 Neighbor-wise Collective on Mat
3357
3358 Input Parameters:
3359+ A - the factored matrix
3360- B - the right-hand-side matrix (dense matrix)
3361
3362 Output Parameter:
3363. X - the result matrix (dense matrix)
3364
3365 Notes:
3366 The matrices B and X cannot be the same. I.e., one cannot
3367 call MatMatSolve(A,X,X).
3368
3369
3370 Most users should employ the simplified KSP interface for linear solvers
3371 instead of working directly with matrix algebra routines such as this.
3372 See, e.g., KSPCreate(). However KSP can only solve for one vector (column of X)
3373 at a time.
3374
3375 When using SuperLU_Dist as a parallel solver, PETSc will use the SuperLU_Dist functionality to solve multiple right hand sides simultaneously. For MUMPS,
3376 it calls a separate solve for each right hand side since MUMPS does not yet support distributed right hand sides.
3377
3378 Since the resulting matrix X must always be dense, we do not support a sparse representation of the matrix B.
3379
3380 Level: developer
3381
3382.seealso: MatMatSolveTranspose(), MatLUFactor(), MatCholeskyFactor()
3383@*/
3384PetscErrorCode MatMatSolve(Mat A,Mat B,Mat X)
3385{
3386 PetscErrorCode ierr;
3387
3388 PetscFunctionBegin;
3389 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
3390 PetscValidType(A,1);
3391 PetscValidHeaderSpecific(B,MAT_CLASSID,2);
3392 PetscValidHeaderSpecific(X,MAT_CLASSID,3);
3393 PetscCheckSameComm(A,1,B,2);
3394 PetscCheckSameComm(A,1,X,3);
3395 if (X == B) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_IDN,"X and B must be different matrices");
3396 if (A->cmap->N != X->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat X: global dim %D %D",A->cmap->N,X->rmap->N);
3397 if (A->rmap->N != B->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat B: global dim %D %D",A->rmap->N,B->rmap->N);
3398 if (X->cmap->N < B->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Solution matrix must have same number of columns as rhs matrix");
3399 if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(0);
3400 if (!A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix");
3401 MatCheckPreallocated(A,1);
3402
3403 ierr = PetscLogEventBegin(MAT_MatSolve,A,B,X,0);CHKERRQ(ierr);
3404 if (!A->ops->matsolve) {
3405 ierr = PetscInfo1(A,"Mat type %s using basic MatMatSolve\n",((PetscObject)A)->type_name);CHKERRQ(ierr);
3406 ierr = MatMatSolve_Basic(A,B,X,PETSC_FALSE);CHKERRQ(ierr);
3407 } else {
3408 ierr = (*A->ops->matsolve)(A,B,X);CHKERRQ(ierr);
3409 }
3410 ierr = PetscLogEventEnd(MAT_MatSolve,A,B,X,0);CHKERRQ(ierr);
3411 ierr = PetscObjectStateIncrease((PetscObject)X);CHKERRQ(ierr);
3412 PetscFunctionReturn(0);
3413}
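/* Editor's note: a minimal MatMatSolve() usage sketch with a dense block of
   nrhs right-hand sides (hypothetical names: F is the factored matrix, comm
   its communicator, m/M its local/global row sizes):

     Mat B,X;
     ierr = MatCreateDense(comm,m,PETSC_DECIDE,M,nrhs,NULL,&B);CHKERRQ(ierr);
     ... fill B with MatSetValues() and assemble it ...
     ierr = MatDuplicate(B,MAT_DO_NOT_COPY_VALUES,&X);CHKERRQ(ierr);
     ierr = MatMatSolve(F,B,X);CHKERRQ(ierr);
*/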
3414
3415/*@
3416 MatMatSolveTranspose - Solves A^T X = B, given a factored matrix.
3417
3418 Neighbor-wise Collective on Mat
3419
3420 Input Parameters:
3421+ A - the factored matrix
3422- B - the right-hand-side matrix (dense matrix)
3423
3424 Output Parameter:
3425. X - the result matrix (dense matrix)
3426
3427 Notes:
3428 The matrices B and X cannot be the same. I.e., one cannot
3429 call MatMatSolveTranspose(A,X,X).
3430
3431
3432 Most users should employ the simplified KSP interface for linear solvers
3433 instead of working directly with matrix algebra routines such as this.
3434 See, e.g., KSPCreate(). However KSP can only solve for one vector (column of X)
3435 at a time.
3436
3437 When using SuperLU_Dist or MUMPS as a parallel solver, PETSc will use their functionality to solve multiple right hand sides simultaneously.
3438
3439 Level: developer
3440
3441.seealso: MatMatSolve(), MatLUFactor(), MatCholeskyFactor()
3442@*/
3443PetscErrorCode MatMatSolveTranspose(Mat A,Mat B,Mat X)
3444{
3445 PetscErrorCode ierr;
3446
3447 PetscFunctionBegin;
3448 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
3449 PetscValidType(A,1);
3450 PetscValidHeaderSpecific(B,MAT_CLASSID,2);
3451 PetscValidHeaderSpecific(X,MAT_CLASSID,3);
3452 PetscCheckSameComm(A,1,B,2);
3453 PetscCheckSameComm(A,1,X,3);
3454 if (X == B) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_IDN,"X and B must be different matrices");
3455 if (A->cmap->N != X->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat X: global dim %D %D",A->cmap->N,X->rmap->N);
3456 if (A->rmap->N != B->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat B: global dim %D %D",A->rmap->N,B->rmap->N);
3457 if (A->rmap->n != B->rmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat A,Mat B: local dim %D %D",A->rmap->n,B->rmap->n);
3458 if (X->cmap->N < B->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Solution matrix must have same number of columns as rhs matrix");
3459 if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(0);
3460 if (!A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix");
3461 MatCheckPreallocated(A,1);
3462
3463 ierr = PetscLogEventBegin(MAT_MatSolve,A,B,X,0);CHKERRQ(ierr);
3464 if (!A->ops->matsolvetranspose) {
3465 ierr = PetscInfo1(A,"Mat type %s using basic MatMatSolveTranspose\n",((PetscObject)A)->type_name);CHKERRQ(ierr);
3466 ierr = MatMatSolve_Basic(A,B,X,PETSC_TRUE);CHKERRQ(ierr);
3467 } else {
3468 ierr = (*A->ops->matsolvetranspose)(A,B,X);CHKERRQ(ierr);
3469 }
3470 ierr = PetscLogEventEnd(MAT_MatSolve,A,B,X,0);CHKERRQ(ierr);
3471 ierr = PetscObjectStateIncrease((PetscObject)X);CHKERRQ(ierr);
3472 PetscFunctionReturn(0);
3473}
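/* Editor's note: a typical use of MatMatSolveTranspose() is a block adjoint
   solve reusing an existing LU factorization -- a sketch, hypothetical names
   A, F, rperm, cperm, info, B, X:

     ierr = MatGetFactor(A,MATSOLVERPETSC,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
     ierr = MatLUFactorSymbolic(F,A,rperm,cperm,&info);CHKERRQ(ierr);
     ierr = MatLUFactorNumeric(F,A,&info);CHKERRQ(ierr);
     ierr = MatMatSolveTranspose(F,B,X);CHKERRQ(ierr);   (solves A^T X = B)
*/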
3474
3475/*@
3476 MatMatTransposeSolve - Solves A X = B^T, given a factored matrix.
3477
3478 Neighbor-wise Collective on Mat
3479
3480 Input Parameters:
3481+ A - the factored matrix
3482- Bt - the transpose of the right-hand-side matrix
3483
3484 Output Parameter:
3485. X - the result matrix (dense matrix)
3486
3487 Notes:
3488 Most users should employ the simplified KSP interface for linear solvers
3489 instead of working directly with matrix algebra routines such as this.
3490 See, e.g., KSPCreate(). However KSP can only solve for one vector (column of X)
3491 at a time.
3492
3493 For MUMPS, only a centralized sparse compressed-column format on the host processor is supported for the right-hand-side matrix; the user must create B^T in sparse compressed-row format on the host processor and call MatMatTransposeSolve() to obtain the effect of MUMPS' MatMatSolve().
3494
3495 Level: developer
3496
3497.seealso: MatMatSolve(), MatMatSolveTranspose(), MatLUFactor(), MatCholeskyFactor()
3498@*/
3499PetscErrorCode MatMatTransposeSolve(Mat A,Mat Bt,Mat X)
3500{
3501 PetscErrorCode ierr;
3502
3503 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 3503; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
3504 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),3504,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3504,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3504,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),3504,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
3505 PetscValidType(A,1)do { if (!((PetscObject)A)->type_name) return PetscError((
(MPI_Comm)0x44000001),3505,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)A)->class_name,1); } while (0)
;
3506 PetscValidHeaderSpecific(Bt,MAT_CLASSID,2)do { if (!Bt) return PetscError(((MPI_Comm)0x44000001),3506,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(Bt,
PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),3506,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(Bt))->classid != MAT_CLASSID) { if
(((PetscObject)(Bt))->classid == -1) return PetscError(((
MPI_Comm)0x44000001),3506,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),3506,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
3507 PetscValidHeaderSpecific(X,MAT_CLASSID,3)do { if (!X) return PetscError(((MPI_Comm)0x44000001),3507,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(X,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3507,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(X))->classid != MAT_CLASSID) { if (
((PetscObject)(X))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3507,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),3507,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
3508 PetscCheckSameComm(A,1,Bt,2)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)A),PetscObjectComm((PetscObject
)Bt),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3508,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),3508,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,__flag); } while (0)
;
3509 PetscCheckSameComm(A,1,X,3)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)A),PetscObjectComm((PetscObject
)X),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3509,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),3509,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,3,__flag); } while (0)
;
3510
3511 if (X == Bt) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_IDN,"X and B must be different matrices")return PetscError(PetscObjectComm((PetscObject)A),3511,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",61,PETSC_ERROR_INITIAL
,"X and B must be different matrices")
;
3512 if (A->cmap->N != X->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat X: global dim %D %D",A->cmap->N,X->rmap->N)return PetscError(PetscObjectComm((PetscObject)A),3512,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat A,Mat X: global dim %D %D",A->cmap->N,X->rmap->
N)
;
3513 if (A->rmap->N != Bt->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat Bt: global dim %D %D",A->rmap->N,Bt->cmap->N)return PetscError(PetscObjectComm((PetscObject)A),3513,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat A,Mat Bt: global dim %D %D",A->rmap->N,Bt->cmap
->N)
;
3514 if (X->cmap->N < Bt->rmap->N) SETERRQ(PetscObjectComm((PetscObject)X),PETSC_ERR_ARG_SIZ,"Solution matrix must have same number of columns as row number of the rhs matrix")return PetscError(PetscObjectComm((PetscObject)X),3514,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Solution matrix must have same number of columns as row number of the rhs matrix"
)
;
3515 if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3516 if (!A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix")return PetscError(PetscObjectComm((PetscObject)A),3516,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Unfactored matrix")
;
3517 MatCheckPreallocated(A,1)do { if (__builtin_expect(!!(!(A)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),3517,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"A",__func__); } while (0)
;
3518
3519 if (!A->ops->mattransposesolve) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type %s",((PetscObject)A)->type_name)return PetscError(PetscObjectComm((PetscObject)A),3519,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)A)->type_name)
;
3520 ierr = PetscLogEventBegin(MAT_MatTrSolve,A,Bt,X,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTrSolve].active) ? (*PetscLogPLB)((MAT_MatTrSolve),0,
(PetscObject)(A),(PetscObject)(Bt),(PetscObject)(X),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3520,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3521 ierr = (*A->ops->mattransposesolve)(A,Bt,X);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3521,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3522 ierr = PetscLogEventEnd(MAT_MatTrSolve,A,Bt,X,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTrSolve].active) ? (*PetscLogPLE)((MAT_MatTrSolve),0,
(PetscObject)(A),(PetscObject)(Bt),(PetscObject)(X),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3522,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3523 ierr = PetscObjectStateIncrease((PetscObject)X)(((PetscObject)X)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3523,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3524 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3525}
3526
3527/*@
3528 MatForwardSolve - Solves L x = b, given a factored matrix, A = LU, or
3529                    U^T*D^(1/2) x = b, given a factored symmetric matrix, A = U^T*D*U.
3530
3531 Neighbor-wise Collective on Mat
3532
3533 Input Parameters:
3534+ mat - the factored matrix
3535- b - the right-hand-side vector
3536
3537 Output Parameter:
3538. x - the result vector
3539
3540 Notes:
3541 MatSolve() should be used for most applications, as it performs
3542 a forward solve followed by a backward solve.
3543
3544 The vectors b and x cannot be the same, i.e., one cannot
3545 call MatForwardSolve(A,x,x).
3546
3547   For matrices in seqsbaij format with block size larger than 1,
3548   the diagonal blocks are not yet implemented as D = D^(1/2) * D^(1/2).
3549 MatForwardSolve() solves U^T*D y = b, and
3550 MatBackwardSolve() solves U x = y.
3551 Thus they do not provide a symmetric preconditioner.
3552
3553 Most users should employ the simplified KSP interface for linear solvers
3554 instead of working directly with matrix algebra routines such as this.
3555 See, e.g., KSPCreate().
3556
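   Example of Usage (a minimal sketch; the ordering index sets perm and iperm
   and the MatFactorInfo info are assumed to have been set up, e.g. with
   MatGetOrdering(), the vectors b, y, x are assumed to have been created with
   compatible sizes, and error checking is omitted):
.vb
   MatLUFactor(mat,perm,iperm,&info);  /* in-place factorization A = LU */
   MatForwardSolve(mat,b,y);           /* solves L y = b */
   MatBackwardSolve(mat,y,x);          /* solves U x = y, so x = inv(A)*b */
.ve
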
3557 Level: developer
3558
3559.seealso: MatSolve(), MatBackwardSolve()
3560@*/
3561PetscErrorCode MatForwardSolve(Mat mat,Vec b,Vec x)
3562{
3563 PetscErrorCode ierr;
3564
3565 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 3565; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
3566 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),3566,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),3566,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),3566,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),3566,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
3567 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),3567,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
3568 PetscValidHeaderSpecific(b,VEC_CLASSID,2)do { if (!b) return PetscError(((MPI_Comm)0x44000001),3568,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(b,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3568,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(b))->classid != VEC_CLASSID) { if (
((PetscObject)(b))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3568,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),3568,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
3569 PetscValidHeaderSpecific(x,VEC_CLASSID,3)do { if (!x) return PetscError(((MPI_Comm)0x44000001),3569,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3569,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3569,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),3569,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
3570 PetscCheckSameComm(mat,1,b,2)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)b),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3570,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),3570,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,__flag); } while (0)
;
3571 PetscCheckSameComm(mat,1,x,3)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)x),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3571,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),3571,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,3,__flag); } while (0)
;
3572 if (x == b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),3572,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",61,PETSC_ERROR_INITIAL
,"x and b must be different vectors")
;
3573 if (mat->cmap->N != x->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3573,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map
->N)
;
3574 if (mat->rmap->N != b->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3574,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map
->N)
;
3575 if (mat->rmap->n != b->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: local dim %D %D",mat->rmap->n,b->map->n)return PetscError(((MPI_Comm)0x44000001),3575,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec b: local dim %D %D",mat->
rmap->n,b->map->n)
;
3576 if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3577 if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix")return PetscError(PetscObjectComm((PetscObject)mat),3577,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Unfactored matrix")
;
3578 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),3578,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
3579
3580 if (!mat->ops->forwardsolve) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),3580,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
3581 ierr = PetscLogEventBegin(MAT_ForwardSolve,mat,b,x,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_ForwardSolve].active) ? (*PetscLogPLB)((MAT_ForwardSolve
),0,(PetscObject)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3581,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3582 ierr = (*mat->ops->forwardsolve)(mat,b,x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3582,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3583 ierr = PetscLogEventEnd(MAT_ForwardSolve,mat,b,x,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_ForwardSolve].active) ? (*PetscLogPLE)((MAT_ForwardSolve
),0,(PetscObject)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3583,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3584 ierr = PetscObjectStateIncrease((PetscObject)x)(((PetscObject)x)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3584,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3585 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3586}
3587
3588/*@
3589   MatBackwardSolve - Solves U x = b, given a factored matrix, A = LU, or
3590                      D^(1/2) U x = b, given a factored symmetric matrix, A = U^T*D*U.
3591
3592 Neighbor-wise Collective on Mat
3593
3594 Input Parameters:
3595+ mat - the factored matrix
3596- b - the right-hand-side vector
3597
3598 Output Parameter:
3599. x - the result vector
3600
3601 Notes:
3602 MatSolve() should be used for most applications, as it performs
3603 a forward solve followed by a backward solve.
3604
3605   The vectors b and x cannot be the same, i.e., one cannot
3606 call MatBackwardSolve(A,x,x).
3607
3608 For matrix in seqsbaij format with block size larger than 1,
3609 the diagonal blocks are not implemented as D = D^(1/2) * D^(1/2) yet.
3610 MatForwardSolve() solves U^T*D y = b, and
3611 MatBackwardSolve() solves U x = y.
3612 Thus they do not provide a symmetric preconditioner.
3613
3614 Most users should employ the simplified KSP interface for linear solvers
3615 instead of working directly with matrix algebra routines such as this.
3616 See, e.g., KSPCreate().
3617
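   Example of Usage (a minimal sketch; mat is assumed to hold an in-place LU
   factorization, as in the MatForwardSolve() example, and y is the
   intermediate vector produced by the forward solve):
.vb
   MatForwardSolve(mat,b,y);      /* y = inv(L)*b */
   MatBackwardSolve(mat,y,x);     /* x = inv(U)*y = inv(A)*b */
.ve
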
3618 Level: developer
3619
3620.seealso: MatSolve(), MatForwardSolve()
3621@*/
3622PetscErrorCode MatBackwardSolve(Mat mat,Vec b,Vec x)
3623{
3624 PetscErrorCode ierr;
3625
3626 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 3626; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
3627 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),3627,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),3627,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),3627,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),3627,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
3628 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),3628,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
3629 PetscValidHeaderSpecific(b,VEC_CLASSID,2)do { if (!b) return PetscError(((MPI_Comm)0x44000001),3629,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(b,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3629,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(b))->classid != VEC_CLASSID) { if (
((PetscObject)(b))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3629,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),3629,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
3630 PetscValidHeaderSpecific(x,VEC_CLASSID,3)do { if (!x) return PetscError(((MPI_Comm)0x44000001),3630,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3630,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3630,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),3630,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
3631 PetscCheckSameComm(mat,1,b,2)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)b),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3631,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),3631,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,__flag); } while (0)
;
3632 PetscCheckSameComm(mat,1,x,3)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)x),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3632,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),3632,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,3,__flag); } while (0)
;
3633 if (x == b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),3633,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",61,PETSC_ERROR_INITIAL
,"x and b must be different vectors")
;
3634 if (mat->cmap->N != x->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3634,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map
->N)
;
3635 if (mat->rmap->N != b->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3635,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map
->N)
;
3636 if (mat->rmap->n != b->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: local dim %D %D",mat->rmap->n,b->map->n)return PetscError(((MPI_Comm)0x44000001),3636,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec b: local dim %D %D",mat->
rmap->n,b->map->n)
;
3637 if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3638 if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix")return PetscError(PetscObjectComm((PetscObject)mat),3638,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Unfactored matrix")
;
3639 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),3639,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
3640
3641 if (!mat->ops->backwardsolve) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),3641,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
3642 ierr = PetscLogEventBegin(MAT_BackwardSolve,mat,b,x,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_BackwardSolve].active) ? (*PetscLogPLB)((MAT_BackwardSolve
),0,(PetscObject)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3642,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3643 ierr = (*mat->ops->backwardsolve)(mat,b,x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3643,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3644 ierr = PetscLogEventEnd(MAT_BackwardSolve,mat,b,x,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_BackwardSolve].active) ? (*PetscLogPLE)((MAT_BackwardSolve
),0,(PetscObject)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3644,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3645 ierr = PetscObjectStateIncrease((PetscObject)x)(((PetscObject)x)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3645,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3646 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3647}
3648
3649/*@
3650 MatSolveAdd - Computes x = y + inv(A)*b, given a factored matrix.
3651
3652 Neighbor-wise Collective on Mat
3653
3654 Input Parameters:
3655+ mat - the factored matrix
3656. b - the right-hand-side vector
3657- y - the vector to be added to
3658
3659 Output Parameter:
3660. x - the result vector
3661
3662 Notes:
3663   The vectors b and x cannot be the same, i.e., one cannot
3664 call MatSolveAdd(A,x,y,x).
3665
3666 Most users should employ the simplified KSP interface for linear solvers
3667 instead of working directly with matrix algebra routines such as this.
3668 See, e.g., KSPCreate().
3669
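   Example of Usage (a minimal sketch; F is assumed to be a factored matrix
   and b, y, x appropriately created vectors). As the implementation below
   shows, when a matrix type provides no native solveadd operation this is
   equivalent to a MatSolve() followed by a VecAXPY():
.vb
   MatSolveAdd(F,b,y,x);          /* x = y + inv(A)*b */
.ve
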
3670 Level: developer
3671
3672.seealso: MatSolve(), MatSolveTranspose(), MatSolveTransposeAdd()
3673@*/
3674PetscErrorCode MatSolveAdd(Mat mat,Vec b,Vec y,Vec x)
3675{
3676 PetscScalar one = 1.0;
3677 Vec tmp;
3678 PetscErrorCode ierr;
3679
3680 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 3680; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
3681 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),3681,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),3681,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),3681,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),3681,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
3682 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),3682,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
3683 PetscValidHeaderSpecific(y,VEC_CLASSID,2)do { if (!y) return PetscError(((MPI_Comm)0x44000001),3683,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(y,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3683,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(y))->classid != VEC_CLASSID) { if (
((PetscObject)(y))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3683,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),3683,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
3684 PetscValidHeaderSpecific(b,VEC_CLASSID,3)do { if (!b) return PetscError(((MPI_Comm)0x44000001),3684,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(b,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3684,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(b))->classid != VEC_CLASSID) { if (
((PetscObject)(b))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3684,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),3684,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
3685 PetscValidHeaderSpecific(x,VEC_CLASSID,4)do { if (!x) return PetscError(((MPI_Comm)0x44000001),3685,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",4); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3685,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,4); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3685,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,4); else return PetscError(((MPI_Comm)0x44000001),3685,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",4); } } while (0)
;
3686 PetscCheckSameComm(mat,1,b,2)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)b),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3686,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),3686,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,__flag); } while (0)
;
3687 PetscCheckSameComm(mat,1,y,2)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)y),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3687,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),3687,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,__flag); } while (0)
;
3688 PetscCheckSameComm(mat,1,x,3)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)x),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3688,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),3688,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,3,__flag); } while (0)
;
3689 if (x == b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),3689,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",61,PETSC_ERROR_INITIAL
,"x and b must be different vectors")
;
3690 if (mat->cmap->N != x->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3690,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map
->N)
;
3691 if (mat->rmap->N != b->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3691,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map
->N)
;
3692 if (mat->rmap->N != y->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %D %D",mat->rmap->N,y->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3692,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec y: global dim %D %D",mat->rmap->N,y->map
->N)
;
3693 if (mat->rmap->n != b->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: local dim %D %D",mat->rmap->n,b->map->n)return PetscError(((MPI_Comm)0x44000001),3693,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec b: local dim %D %D",mat->
rmap->n,b->map->n)
;
3694 if (x->map->n != y->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Vec x,Vec y: local dim %D %D",x->map->n,y->map->n)return PetscError(((MPI_Comm)0x44000001),3694,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Vec x,Vec y: local dim %D %D",x->
map->n,y->map->n)
;
3695 if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3696 if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix")return PetscError(PetscObjectComm((PetscObject)mat),3696,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Unfactored matrix")
;
3697 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),3697,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
3698
3699 ierr = PetscLogEventBegin(MAT_SolveAdd,mat,b,x,y)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_SolveAdd].active) ? (*PetscLogPLB)((MAT_SolveAdd),0,(PetscObject
)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject)(y)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3699,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3700 if (mat->ops->solveadd) {
3701 ierr = (*mat->ops->solveadd)(mat,b,y,x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3701,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3702 } else {
3703 /* do the solve then the add manually */
3704 if (x != y) {
3705 ierr = MatSolve(mat,b,x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3705,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3706 ierr = VecAXPY(x,one,y);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3706,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3707 } else {
3708 ierr = VecDuplicate(x,&tmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3708,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3709 ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)tmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3709,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3710 ierr = VecCopy(x,tmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3710,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3711 ierr = MatSolve(mat,b,x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3711,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3712 ierr = VecAXPY(x,one,tmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3712,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3713 ierr = VecDestroy(&tmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3713,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3714 }
3715 }
3716 ierr = PetscLogEventEnd(MAT_SolveAdd,mat,b,x,y)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_SolveAdd].active) ? (*PetscLogPLE)((MAT_SolveAdd),0,(PetscObject
)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject)(y)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3716,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3717 ierr = PetscObjectStateIncrease((PetscObject)x)(((PetscObject)x)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3717,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3718 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3719}
3720
3721/*@
3722 MatSolveTranspose - Solves A' x = b, given a factored matrix.
3723
3724 Neighbor-wise Collective on Mat
3725
3726 Input Parameters:
3727+ mat - the factored matrix
3728- b - the right-hand-side vector
3729
3730 Output Parameter:
3731. x - the result vector
3732
3733 Notes:
3734   The vectors b and x cannot be the same, i.e., one cannot
3735 call MatSolveTranspose(A,x,x).
3736
3737 Most users should employ the simplified KSP interface for linear solvers
3738 instead of working directly with matrix algebra routines such as this.
3739 See, e.g., KSPCreate().
3740
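   Example of Usage (a minimal sketch; F is assumed to be a factored matrix
   and b, x appropriately created vectors):
.vb
   MatSolveTranspose(F,b,x);      /* solves A' x = b */
.ve
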
3741 Level: developer
3742
3743.seealso: MatSolve(), MatSolveAdd(), MatSolveTransposeAdd()
3744@*/
3745PetscErrorCode MatSolveTranspose(Mat mat,Vec b,Vec x)
3746{
3747 PetscErrorCode ierr;
3748
3749 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 3749; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
3750 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),3750,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),3750,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),3750,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),3750,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
3751 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),3751,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
3752 PetscValidHeaderSpecific(b,VEC_CLASSID,2)do { if (!b) return PetscError(((MPI_Comm)0x44000001),3752,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(b,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3752,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(b))->classid != VEC_CLASSID) { if (
((PetscObject)(b))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3752,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),3752,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
3753 PetscValidHeaderSpecific(x,VEC_CLASSID,3)do { if (!x) return PetscError(((MPI_Comm)0x44000001),3753,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3753,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3753,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),3753,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
3754 PetscCheckSameComm(mat,1,b,2)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)b),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3754,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),3754,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,__flag); } while (0)
;
3755 PetscCheckSameComm(mat,1,x,3)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)x),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3755,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),3755,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,3,__flag); } while (0)
;
3756 if (x == b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),3756,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",61,PETSC_ERROR_INITIAL
,"x and b must be different vectors")
;
3757 if (mat->rmap->N != x->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->rmap->N,x->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3757,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec x: global dim %D %D",mat->rmap->N,x->map
->N)
;
3758 if (mat->cmap->N != b->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %D %D",mat->cmap->N,b->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3758,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec b: global dim %D %D",mat->cmap->N,b->map
->N)
;
3759 if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3760 if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix")return PetscError(PetscObjectComm((PetscObject)mat),3760,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Unfactored matrix")
;
3761 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),3761,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
3762 ierr = PetscLogEventBegin(MAT_SolveTranspose,mat,b,x,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_SolveTranspose].active) ? (*PetscLogPLB)((MAT_SolveTranspose
),0,(PetscObject)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3762,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3763 if (mat->factorerrortype) {
3764 ierr = PetscInfo1(mat,"MatFactorError %D\n",mat->factorerrortype)PetscInfo_Private(__func__,mat,"MatFactorError %D\n",mat->
factorerrortype)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3764,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3765 ierr = VecSetInf(x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3765,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3766 } else {
3767 if (!mat->ops->solvetranspose) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),3767,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Matrix type %s",((PetscObject)mat)->type_name)
;
3768 ierr = (*mat->ops->solvetranspose)(mat,b,x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3768,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3769 }
3770 ierr = PetscLogEventEnd(MAT_SolveTranspose,mat,b,x,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_SolveTranspose].active) ? (*PetscLogPLE)((MAT_SolveTranspose
),0,(PetscObject)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3770,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3771 ierr = PetscObjectStateIncrease((PetscObject)x)(((PetscObject)x)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3771,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3772 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3773}
3774
3775/*@
3776 MatSolveTransposeAdd - Computes x = y + inv(Transpose(A)) b, given a
3777 factored matrix.
3778
3779 Neighbor-wise Collective on Mat
3780
3781 Input Parameters:
3782+ mat - the factored matrix
3783. b - the right-hand-side vector
3784- y - the vector to be added to
3785
3786 Output Parameter:
3787. x - the result vector
3788
3789 Notes:
3790 The vectors b and x cannot be the same. I.e., one cannot
3791 call MatSolveTransposeAdd(A,x,y,x).
3792
3793 Most users should employ the simplified KSP interface for linear solvers
3794 instead of working directly with matrix algebra routines such as this.
3795 See, e.g., KSPCreate().
3796
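 Example of Usage (a minimal sketch; assumes A has already been factored, e.g.
 with MatLUFactor(), and that b, y, and x are compatible vectors with x
 different from b):
.vb
      MatSolveTransposeAdd(A,b,y,x);   /* x = y + inv(Transpose(A)) b */
.ve
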
3797 Level: developer
3798
3799.seealso: MatSolve(), MatSolveAdd(), MatSolveTranspose()
3800@*/
3801PetscErrorCode MatSolveTransposeAdd(Mat mat,Vec b,Vec y,Vec x)
3802{
3803 PetscScalar one = 1.0;
3804 PetscErrorCode ierr;
3805 Vec tmp;
3806
3807  PetscFunctionBegin;
3808  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3809  PetscValidType(mat,1);
3810  PetscValidHeaderSpecific(y,VEC_CLASSID,2);
3811  PetscValidHeaderSpecific(b,VEC_CLASSID,3);
3812  PetscValidHeaderSpecific(x,VEC_CLASSID,4);
3813  PetscCheckSameComm(mat,1,b,2);
3814  PetscCheckSameComm(mat,1,y,3);
3815  PetscCheckSameComm(mat,1,x,4);
3816  if (x == b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors");
3817  if (mat->rmap->N != x->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->rmap->N,x->map->N);
3818  if (mat->cmap->N != b->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %D %D",mat->cmap->N,b->map->N);
3819  if (mat->cmap->N != y->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %D %D",mat->cmap->N,y->map->N);
3820  if (x->map->n != y->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Vec x,Vec y: local dim %D %D",x->map->n,y->map->n);
3821  if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0);
3822  if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix");
3823  MatCheckPreallocated(mat,1);
3824
3825  ierr = PetscLogEventBegin(MAT_SolveTransposeAdd,mat,b,x,y);CHKERRQ(ierr);
3826  if (mat->ops->solvetransposeadd) {
3827    if (mat->factorerrortype) {
3828      ierr = PetscInfo1(mat,"MatFactorError %D\n",mat->factorerrortype);CHKERRQ(ierr);
3829      ierr = VecSetInf(x);CHKERRQ(ierr);
3830    } else {
3831      ierr = (*mat->ops->solvetransposeadd)(mat,b,y,x);CHKERRQ(ierr);
3832    }
3833  } else {
3834    /* do the solve then the add manually */
3835    if (x != y) {
3836      ierr = MatSolveTranspose(mat,b,x);CHKERRQ(ierr);
3837      ierr = VecAXPY(x,one,y);CHKERRQ(ierr);
3838    } else {
3839      ierr = VecDuplicate(x,&tmp);CHKERRQ(ierr);
3840      ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)tmp);CHKERRQ(ierr);
3841      ierr = VecCopy(x,tmp);CHKERRQ(ierr);
3842      ierr = MatSolveTranspose(mat,b,x);CHKERRQ(ierr);
3843      ierr = VecAXPY(x,one,tmp);CHKERRQ(ierr);
3844      ierr = VecDestroy(&tmp);CHKERRQ(ierr);
3845    }
3846  }
3847  ierr = PetscLogEventEnd(MAT_SolveTransposeAdd,mat,b,x,y);CHKERRQ(ierr);
3848  ierr = PetscObjectStateIncrease((PetscObject)x);CHKERRQ(ierr);
3849  PetscFunctionReturn(0);
3850}
3851/* ----------------------------------------------------------------*/
3852
3853/*@
3854 MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
3855
3856 Neighbor-wise Collective on Mat
3857
3858 Input Parameters:
3859+ mat - the matrix
3860. b - the right-hand side
3861. omega - the relaxation factor
3862. flag - flag indicating the type of SOR (see below)
3863. shift - diagonal shift
3864. its - the number of iterations
3865- lits - the number of local iterations
3866
3867 Output Parameter:
3868. x - the solution (can contain an initial guess, use option SOR_ZERO_INITIAL_GUESS to indicate no guess)
3869
3870 SOR Flags:
3871. SOR_FORWARD_SWEEP - forward SOR
3872. SOR_BACKWARD_SWEEP - backward SOR
3873. SOR_SYMMETRIC_SWEEP - SSOR (symmetric SOR)
3874. SOR_LOCAL_FORWARD_SWEEP - local forward SOR
3875. SOR_LOCAL_BACKWARD_SWEEP - local backward SOR
3876. SOR_LOCAL_SYMMETRIC_SWEEP - local SSOR
3877. SOR_APPLY_UPPER, SOR_APPLY_LOWER - applies
3878 upper/lower triangular part of matrix to
3879 vector (with omega)
3880. SOR_ZERO_INITIAL_GUESS - zero initial guess
3881
3882 Notes:
3883 SOR_LOCAL_FORWARD_SWEEP, SOR_LOCAL_BACKWARD_SWEEP, and
3884 SOR_LOCAL_SYMMETRIC_SWEEP perform separate independent smoothings
3885 on each processor.
3886
3887 Application programmers will not generally use MatSOR() directly,
3888 but instead will employ the KSP/PC interface.
3889
3890
3891 For BAIJ, SBAIJ, and AIJ matrices with Inodes this does a block SOR smoothing; otherwise it does a pointwise smoothing.
3892
3893 Notes for Advanced Users:
3894 The flags are implemented as bitwise inclusive or operations.
3895 For example, use (SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP)
3896 to specify a zero initial guess for SSOR.
3897
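 Example of Usage (a minimal sketch; assumes mat is assembled and b and x are
 compatible, distinct vectors):
.vb
      /* one SSOR sweep with omega = 1.0, no diagonal shift, zero initial guess */
      MatSOR(mat,b,1.0,SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP,0.0,1,1,x);
.ve
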
3898 Most users should employ the simplified KSP interface for linear solvers
3899 instead of working directly with matrix algebra routines such as this.
3900 See, e.g., KSPCreate().
3901
3902 Vectors x and b CANNOT be the same
3903
3904 Developer Note: We should add block SOR support for AIJ matrices with block size greater than one and no Inodes
3905
3906 Level: developer
3907
3908@*/
3909PetscErrorCode MatSOR(Mat mat,Vec b,PetscReal omega,MatSORType flag,PetscReal shift,PetscInt its,PetscInt lits,Vec x)
3910{
3911 PetscErrorCode ierr;
3912
3913  PetscFunctionBegin;
3914  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3915  PetscValidType(mat,1);
3916  PetscValidHeaderSpecific(b,VEC_CLASSID,2);
3917  PetscValidHeaderSpecific(x,VEC_CLASSID,8);
3918  PetscCheckSameComm(mat,1,b,2);
3919  PetscCheckSameComm(mat,1,x,8);
3920  if (!mat->ops->sor) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
3921  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
3922  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3923  if (mat->cmap->N != x->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map->N);
3924  if (mat->rmap->N != b->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map->N);
3925  if (mat->rmap->n != b->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: local dim %D %D",mat->rmap->n,b->map->n);
3926  if (its <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D positive",its);
3927  if (lits <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires local its %D positive",lits);
3928  if (b == x) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_IDN,"b and x vector cannot be the same");
3929
3930  MatCheckPreallocated(mat,1);
3931  ierr = PetscLogEventBegin(MAT_SOR,mat,b,x,0);CHKERRQ(ierr);
3932  ierr = (*mat->ops->sor)(mat,b,omega,flag,shift,its,lits,x);CHKERRQ(ierr);
3933  ierr = PetscLogEventEnd(MAT_SOR,mat,b,x,0);CHKERRQ(ierr);
3934  ierr = PetscObjectStateIncrease((PetscObject)x);CHKERRQ(ierr);
3935  PetscFunctionReturn(0);
3936}
3937
3938/*
3939 Default matrix copy routine.
3940*/
3941PetscErrorCode MatCopy_Basic(Mat A,Mat B,MatStructure str)
3942{
3943 PetscErrorCode ierr;
3944 PetscInt i,rstart = 0,rend = 0,nz;
3945 const PetscInt *cwork;
3946 const PetscScalar *vwork;
3947
3948  PetscFunctionBegin;
3949  if (B->assembled) {
3950    ierr = MatZeroEntries(B);CHKERRQ(ierr);
3951  }
3952  if (str == SAME_NONZERO_PATTERN) {
3953    ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
3954    for (i=rstart; i<rend; i++) {
3955      ierr = MatGetRow(A,i,&nz,&cwork,&vwork);CHKERRQ(ierr);
3956      ierr = MatSetValues(B,1,&i,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
3957      ierr = MatRestoreRow(A,i,&nz,&cwork,&vwork);CHKERRQ(ierr);
3958    }
3959  } else {
3960    ierr = MatAYPX(B,0.0,A,str);CHKERRQ(ierr);
3961  }
3962  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3963  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3964  PetscFunctionReturn(0);
3965}
3966
3967/*@
3968 MatCopy - Copies a matrix to another matrix.
3969
3970 Collective on Mat
3971
3972 Input Parameters:
3973+ A - the matrix
3974- str - SAME_NONZERO_PATTERN or DIFFERENT_NONZERO_PATTERN
3975
3976 Output Parameter:
3977. B - where the copy is put
3978
3979 Notes:
3980 If you use SAME_NONZERO_PATTERN, then the two matrices must have the
3981 same nonzero pattern or the routine will crash.
3982
3983 MatCopy() copies the matrix entries of a matrix to another existing
3984 matrix (after first zeroing the second matrix). A related routine is
3985 MatConvert(), which first creates a new matrix and then copies the data.
3986
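 Example of Usage (a minimal sketch; assumes A is assembled, with B created here
 so that it shares A's nonzero pattern):
.vb
      MatDuplicate(A,MAT_DO_NOT_COPY_VALUES,&B);
      MatCopy(A,B,SAME_NONZERO_PATTERN);
.ve
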
3987 Level: intermediate
3988
3989.seealso: MatConvert(), MatDuplicate()
3990
3991@*/
3992PetscErrorCode MatCopy(Mat A,Mat B,MatStructure str)
3993{
3994 PetscErrorCode ierr;
3995 PetscInt i;
3996
3997  PetscFunctionBegin;
3998  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
3999  PetscValidHeaderSpecific(B,MAT_CLASSID,2);
4000  PetscValidType(A,1);
4001  PetscValidType(B,2);
4002  PetscCheckSameComm(A,1,B,2);
4003  MatCheckPreallocated(B,2);
4004  if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4005  if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
4006  if (A->rmap->N != B->rmap->N || A->cmap->N != B->cmap->N) SETERRQ4(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat B: global dim (%D,%D) (%D,%D)",A->rmap->N,B->rmap->N,A->cmap->N,B->cmap->N);
4007  MatCheckPreallocated(A,1);
4008  if (A == B) PetscFunctionReturn(0);
4009
4010  ierr = PetscLogEventBegin(MAT_Copy,A,B,0,0);CHKERRQ(ierr);
4011  if (A->ops->copy) {
4012    ierr = (*A->ops->copy)(A,B,str);CHKERRQ(ierr);
4013  } else { /* generic conversion */
4014    ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
4015  }
4016
4017  B->stencil.dim = A->stencil.dim;
4018  B->stencil.noc = A->stencil.noc;
4019  for (i=0; i<=A->stencil.dim; i++) {
4020    B->stencil.dims[i]   = A->stencil.dims[i];
4021    B->stencil.starts[i] = A->stencil.starts[i];
4022  }
4023
4024  ierr = PetscLogEventEnd(MAT_Copy,A,B,0,0);CHKERRQ(ierr);
4025  ierr = PetscObjectStateIncrease((PetscObject)B);CHKERRQ(ierr);
4026  PetscFunctionReturn(0);
4027}
4028
4029/*@C
4030 MatConvert - Converts a matrix to another matrix, either of the same
4031 or different type.
4032
4033 Collective on Mat
4034
4035 Input Parameters:
4036+ mat - the matrix
4037. newtype - new matrix type. Use MATSAME to create a new matrix of the
4038 same type as the original matrix.
4039- reuse - denotes if the destination matrix is to be created or reused.
4040 Use MAT_INPLACE_MATRIX for in-place conversion (that is, when you want the input mat to be changed to contain the matrix in the new format); otherwise use
4041 MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX (the latter can only be used after a first call with MAT_INITIAL_MATRIX and causes the matrix space in M to be reused).
4042
4043 Output Parameter:
4044. M - pointer to place new matrix
4045
4046 Notes:
4047 MatConvert() first creates a new matrix and then copies the data from
4048 the first matrix. A related routine is MatCopy(), which copies the matrix
4049 entries of one matrix to another already existing matrix context.
4050
4051 Cannot be used to convert a sequential matrix to a parallel matrix or vice versa;
4052 the MPI communicator of the generated matrix is always the same as the communicator
4053 of the input matrix.
4054
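 Example of Usage (a minimal sketch; assumes mat is assembled):
.vb
      Mat M;
      MatConvert(mat,MATDENSE,MAT_INITIAL_MATRIX,&M);   /* create M as a dense copy of mat */
      MatConvert(mat,MATDENSE,MAT_REUSE_MATRIX,&M);     /* later, refill the existing M from mat */
      MatDestroy(&M);
.ve
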
4055 Level: intermediate
4056
4057.seealso: MatCopy(), MatDuplicate()
4058@*/
4059PetscErrorCode MatConvert(Mat mat, MatType newtype,MatReuse reuse,Mat *M)
4060{
4061 PetscErrorCode ierr;
4062 PetscBool sametype,issame,flg;
4063 char convname[256],mtype[256];
4064 Mat B;
4065
4066  PetscFunctionBegin;
4067  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4068  PetscValidType(mat,1);
4069  PetscValidPointer(M,3);
4070  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4071  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
4072  MatCheckPreallocated(mat,1);
4073
4074  ierr = PetscOptionsGetString(((PetscObject)mat)->options,((PetscObject)mat)->prefix,"-matconvert_type",mtype,256,&flg);CHKERRQ(ierr);
4075  if (flg) {
4076    newtype = mtype;
4077  }
4078  ierr = PetscObjectTypeCompare((PetscObject)mat,newtype,&sametype);CHKERRQ(ierr);
4079  ierr = PetscStrcmp(newtype,"same",&issame);CHKERRQ(ierr);
4080  if ((reuse == MAT_INPLACE_MATRIX) && (mat != *M)) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"MAT_INPLACE_MATRIX requires same input and output matrix");
4081  if ((reuse == MAT_REUSE_MATRIX) && (mat == *M)) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4082
4083  if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) PetscFunctionReturn(0);
4084
4085  if ((sametype || issame) && (reuse==MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4086    ierr = (*mat->ops->duplicate)(mat,MAT_COPY_VALUES,M);CHKERRQ(ierr);
4087  } else {
4088    PetscErrorCode (*conv)(Mat, MatType,MatReuse,Mat*)=NULL;
4089    const char     *prefix[3] = {"seq","mpi",""};
4090    PetscInt       i;
4091    /*
4092       Order of precedence:
4093       0) See if newtype is a superclass of the current matrix.
4094       1) See if a specialized converter is known to the current matrix.
4095       2) See if a specialized converter is known to the desired matrix class.
4096       3) See if a good general converter is registered for the desired class
4097          (as of 6/27/03 only MATMPIADJ falls into this category).
4098       4) See if a good general converter is known for the current matrix.
4099       5) Use a really basic converter.
4100    */
4101
4102    /* 0) See if newtype is a superclass of the current matrix,
4103          i.e. mat is mpiaij and newtype is aij */
4104    for (i=0; i<2; i++) {
4105      ierr = PetscStrncpy(convname,prefix[i],sizeof(convname));CHKERRQ(ierr);
4106      ierr = PetscStrlcat(convname,newtype,sizeof(convname));CHKERRQ(ierr);
4107      ierr = PetscStrcmp(convname,((PetscObject)mat)->type_name,&flg);CHKERRQ(ierr);
4108      ierr = PetscInfo3(mat,"Check superclass %s %s -> %d\n",convname,((PetscObject)mat)->type_name,flg);CHKERRQ(ierr);
4109      if (flg) {
4110        if (reuse == MAT_INPLACE_MATRIX) {
4111          PetscFunctionReturn(0);
4112        } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4113          ierr = (*mat->ops->duplicate)(mat,MAT_COPY_VALUES,M);CHKERRQ(ierr);
4114          PetscFunctionReturn(0);
4115        } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4116          ierr = MatCopy(mat,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
4117          PetscFunctionReturn(0);
4118        }
4119      }
4120    }
4121 /* 1) See if a specialized converter is known to the current matrix and the desired class */
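    /* For example (illustrative): converting a MATSEQAIJ matrix to MATSEQDENSE
       queries composed function names such as "MatConvert_seqaij_seqdense_C",
       built from the pieces concatenated in the loop below. */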
4122 for (i=0; i<3; i++) {
4123 ierr = PetscStrncpy(convname,"MatConvert_",sizeof(convname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4123,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4124 ierr = PetscStrlcat(convname,((PetscObject)mat)->type_name,sizeof(convname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4124,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4125 ierr = PetscStrlcat(convname,"_",sizeof(convname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4125,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4126      ierr = PetscStrlcat(convname,prefix[i],sizeof(convname));CHKERRQ(ierr);
4127      ierr = PetscStrlcat(convname,issame ? ((PetscObject)mat)->type_name : newtype,sizeof(convname));CHKERRQ(ierr);
4128      ierr = PetscStrlcat(convname,"_C",sizeof(convname));CHKERRQ(ierr);
4129      ierr = PetscObjectQueryFunction((PetscObject)mat,convname,&conv);CHKERRQ(ierr);
4130      ierr = PetscInfo3(mat,"Check specialized (1) %s (%s) -> %d\n",convname,((PetscObject)mat)->type_name,!!conv);CHKERRQ(ierr);
4131      if (conv) goto foundconv;
4132    }
4133
4134    /* 2) See if a specialized converter is known to the desired matrix class. */
4135    ierr = MatCreate(PetscObjectComm((PetscObject)mat),&B);CHKERRQ(ierr);
4136    ierr = MatSetSizes(B,mat->rmap->n,mat->cmap->n,mat->rmap->N,mat->cmap->N);CHKERRQ(ierr);
4137    ierr = MatSetType(B,newtype);CHKERRQ(ierr);
4138    for (i=0; i<3; i++) {
4139      ierr = PetscStrncpy(convname,"MatConvert_",sizeof(convname));CHKERRQ(ierr);
4140      ierr = PetscStrlcat(convname,((PetscObject)mat)->type_name,sizeof(convname));CHKERRQ(ierr);
4141      ierr = PetscStrlcat(convname,"_",sizeof(convname));CHKERRQ(ierr);
4142      ierr = PetscStrlcat(convname,prefix[i],sizeof(convname));CHKERRQ(ierr);
4143      ierr = PetscStrlcat(convname,newtype,sizeof(convname));CHKERRQ(ierr);
4144      ierr = PetscStrlcat(convname,"_C",sizeof(convname));CHKERRQ(ierr);
4145      ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
4146      ierr = PetscInfo3(mat,"Check specialized (2) %s (%s) -> %d\n",convname,((PetscObject)B)->type_name,!!conv);CHKERRQ(ierr);
4147      if (conv) {
4148        ierr = MatDestroy(&B);CHKERRQ(ierr);
4149        goto foundconv;
4150      }
4151    }
4152
4153    /* 3) See if a good general converter is registered for the desired class */
4154    conv = B->ops->convertfrom;
4155    ierr = PetscInfo2(mat,"Check convertfrom (%s) -> %d\n",((PetscObject)B)->type_name,!!conv);CHKERRQ(ierr);
4156    ierr = MatDestroy(&B);CHKERRQ(ierr);
4157    if (conv) goto foundconv;
4158
4159    /* 4) See if a good general converter is known for the current matrix */
4160    if (mat->ops->convert) {
4161      conv = mat->ops->convert;
4162    }
4163    ierr = PetscInfo2(mat,"Check general convert (%s) -> %d\n",((PetscObject)mat)->type_name,!!conv);CHKERRQ(ierr);
4164    if (conv) goto foundconv;
4165
4166    /* 5) Use a really basic converter. */
4167    ierr = PetscInfo(mat,"Using MatConvert_Basic\n");CHKERRQ(ierr);
4168    conv = MatConvert_Basic;
4169
4170foundconv:
4171    ierr = PetscLogEventBegin(MAT_Convert,mat,0,0,0);CHKERRQ(ierr);
4172    ierr = (*conv)(mat,newtype,reuse,M);CHKERRQ(ierr);
4173    if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4174      /* the block sizes must be the same if the mappings are copied over */
4175      (*M)->rmap->bs = mat->rmap->bs;
4176      (*M)->cmap->bs = mat->cmap->bs;
4177      ierr = PetscObjectReference((PetscObject)mat->rmap->mapping);CHKERRQ(ierr);
4178      ierr = PetscObjectReference((PetscObject)mat->cmap->mapping);CHKERRQ(ierr);
4179      (*M)->rmap->mapping = mat->rmap->mapping;
4180      (*M)->cmap->mapping = mat->cmap->mapping;
4181    }
4182    (*M)->stencil.dim = mat->stencil.dim;
4183    (*M)->stencil.noc = mat->stencil.noc;
4184    for (i=0; i<=mat->stencil.dim; i++) {
4185      (*M)->stencil.dims[i]   = mat->stencil.dims[i];
4186      (*M)->stencil.starts[i] = mat->stencil.starts[i];
4187    }
4188    ierr = PetscLogEventEnd(MAT_Convert,mat,0,0,0);CHKERRQ(ierr);
4189  }
4190  ierr = PetscObjectStateIncrease((PetscObject)*M);CHKERRQ(ierr);
4191
4192  /* Copy Mat options */
4193  if (mat->symmetric) {ierr = MatSetOption(*M,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);}
4194  if (mat->hermitian) {ierr = MatSetOption(*M,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);}
4195  PetscFunctionReturn(0);
4196}
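
The search order above (specialized converters, then a registered convertfrom, then a general convert, then MatConvert_Basic) is driven by ordinary MatConvert() calls. A minimal sketch, assuming an assembled matrix A (error handling via CHKERRQ as elsewhere in this file):
.vb
  Mat B;
  ierr = MatConvert(A,MATBAIJ,MAT_INITIAL_MATRIX,&B);CHKERRQ(ierr);  /* out-of-place: B is a new BAIJ copy of A */
  ierr = MatConvert(A,MATDENSE,MAT_INPLACE_MATRIX,&A);CHKERRQ(ierr); /* in-place: A itself becomes dense */
  ierr = MatDestroy(&B);CHKERRQ(ierr);
.ve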
4197
4198/*@C
4199 MatFactorGetSolverType - Returns the name of the package providing the factorization routines
4200
4201 Not Collective
4202
4203 Input Parameter:
4204. mat - the matrix, must be a factored matrix
4205
4206 Output Parameter:
4207. type - the string name of the package (do not free this string)
4208
4209 Notes:
4210 In Fortran you pass in an empty string and the package name will be copied into it.
4211 (Make sure the string is long enough)
4212
4213 Level: intermediate
4214
4215.seealso: MatCopy(), MatDuplicate(), MatGetFactorAvailable(), MatGetFactor()
4216@*/
4217PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4218{
4219  PetscErrorCode ierr, (*conv)(Mat,MatSolverType*);
4220
4221  PetscFunctionBegin;
4222  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4223  PetscValidType(mat,1);
4224  if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
4225  ierr = PetscObjectQueryFunction((PetscObject)mat,"MatFactorGetSolverType_C",&conv);CHKERRQ(ierr);
4226  if (!conv) {
4227    *type = MATSOLVERPETSC;
4228  } else {
4229    ierr = (*conv)(mat,type);CHKERRQ(ierr);
4230  }
4231  PetscFunctionReturn(0);
4232}
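
A minimal usage sketch, assuming F is a factored matrix obtained earlier from MatGetFactor():
.vb
  MatSolverType stype;
  ierr = MatFactorGetSolverType(F,&stype);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"factorization provided by %s\n",stype);CHKERRQ(ierr);
.ve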
4233
4234typedef struct _MatSolverTypeForSpecifcType* MatSolverTypeForSpecifcType;
4235struct _MatSolverTypeForSpecifcType {
4236 MatType mtype;
4237 PetscErrorCode (*getfactor[4])(Mat,MatFactorType,Mat*);
4238 MatSolverTypeForSpecifcType next;
4239};
4240
4241typedef struct _MatSolverTypeHolder* MatSolverTypeHolder;
4242struct _MatSolverTypeHolder {
4243 char *name;
4244 MatSolverTypeForSpecifcType handlers;
4245 MatSolverTypeHolder next;
4246};
4247
4248static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4249
4250/*@C
4251 MatSolverTypeRegister - Registers a MatSolverType that works for a particular matrix type
4252
4253 Input Parameters:
4254+ package - name of the package, for example petsc or superlu
4255. mtype - the matrix type that works with this package
4256. ftype - the type of factorization supported by the package
4257- getfactor - routine that will create the factored matrix ready to be used
4258
4259 Level: intermediate
4260
4261.seealso: MatCopy(), MatDuplicate(), MatGetFactorAvailable()
4262@*/
4263PetscErrorCode MatSolverTypeRegister(MatSolverType package,MatType mtype,MatFactorType ftype,PetscErrorCode (*getfactor)(Mat,MatFactorType,Mat*))
4264{
4265  PetscErrorCode              ierr;
4266  MatSolverTypeHolder         next = MatSolverTypeHolders,prev = NULL;
4267  PetscBool                   flg;
4268  MatSolverTypeForSpecifcType inext,iprev = NULL;
4269
4270  PetscFunctionBegin;
4271  ierr = MatInitializePackage();CHKERRQ(ierr);
4272  if (!next) {
4273    ierr = PetscNew(&MatSolverTypeHolders);CHKERRQ(ierr);
4274    ierr = PetscStrallocpy(package,&MatSolverTypeHolders->name);CHKERRQ(ierr);
4275    ierr = PetscNew(&MatSolverTypeHolders->handlers);CHKERRQ(ierr);
4276    ierr = PetscStrallocpy(mtype,(char **)&MatSolverTypeHolders->handlers->mtype);CHKERRQ(ierr);
4277    MatSolverTypeHolders->handlers->getfactor[(int)ftype-1] = getfactor;
4278    PetscFunctionReturn(0);
4279  }
4280  while (next) {
4281    ierr = PetscStrcasecmp(package,next->name,&flg);CHKERRQ(ierr);
4282    if (flg) {
4283      if (!next->handlers) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MatSolverTypeHolder is missing handlers");
4284      inext = next->handlers;
4285      while (inext) {
4286        ierr = PetscStrcasecmp(mtype,inext->mtype,&flg);CHKERRQ(ierr);
4287        if (flg) {
4288          inext->getfactor[(int)ftype-1] = getfactor;
4289          PetscFunctionReturn(0);
4290        }
4291        iprev = inext;
4292        inext = inext->next;
4293      }
4294      ierr = PetscNew(&iprev->next);CHKERRQ(ierr);
4295      ierr = PetscStrallocpy(mtype,(char **)&iprev->next->mtype);CHKERRQ(ierr);
4296      iprev->next->getfactor[(int)ftype-1] = getfactor;
4297      PetscFunctionReturn(0);
4298    }
4299    prev = next;
4300    next = next->next;
4301  }
4302  ierr = PetscNew(&prev->next);CHKERRQ(ierr);
4303  ierr = PetscStrallocpy(package,&prev->next->name);CHKERRQ(ierr);
4304  ierr = PetscNew(&prev->next->handlers);CHKERRQ(ierr);
4305  ierr = PetscStrallocpy(mtype,(char **)&prev->next->handlers->mtype);CHKERRQ(ierr);
4306  prev->next->handlers->getfactor[(int)ftype-1] = getfactor;
4307  PetscFunctionReturn(0);
4308}
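
A registration sketch, as it might appear in a solver package's initialization routine; the package name "mypkg" and the factory MatGetFactor_SeqAIJ_MyPkg are hypothetical placeholders matching the getfactor signature above:
.vb
  extern PetscErrorCode MatGetFactor_SeqAIJ_MyPkg(Mat,MatFactorType,Mat*); /* hypothetical factory */
  ierr = MatSolverTypeRegister("mypkg",MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_SeqAIJ_MyPkg);CHKERRQ(ierr);
.ve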
4309
4310/*@C
4311 MatSolverTypeGet - Gets the function that creates the factor matrix if it exists
4312
4313 Input Parameters:
4314+ package - name of the package, for example petsc or superlu
4315. ftype - the type of factorization supported by the package
4316- mtype - the matrix type that works with this package
4317
4318 Output Parameters:
4319+ foundpackage - PETSC_TRUE if the package was registered
4320. foundmtype - PETSC_TRUE if the package supports the requested mtype
4321- getfactor - routine that will create the factored matrix ready to be used or NULL if not found
4322
4323 Level: intermediate
4324
4325.seealso: MatCopy(), MatDuplicate(), MatGetFactorAvailable()
4326@*/
4327PetscErrorCode MatSolverTypeGet(MatSolverType package,MatType mtype,MatFactorType ftype,PetscBool *foundpackage,PetscBool *foundmtype,PetscErrorCode (**getfactor)(Mat,MatFactorType,Mat*))
4328{
4329  PetscErrorCode              ierr;
4330  MatSolverTypeHolder         next = MatSolverTypeHolders;
4331  PetscBool                   flg;
4332  MatSolverTypeForSpecifcType inext;
4333
4334  PetscFunctionBegin;
4335  if (foundpackage) *foundpackage = PETSC_FALSE;
4336  if (foundmtype)   *foundmtype   = PETSC_FALSE;
4337  if (getfactor)    *getfactor    = NULL;
4338
4339  if (package) {
4340    while (next) {
4341      ierr = PetscStrcasecmp(package,next->name,&flg);CHKERRQ(ierr);
4342      if (flg) {
4343        if (foundpackage) *foundpackage = PETSC_TRUE;
4344        inext = next->handlers;
4345        while (inext) {
4346          ierr = PetscStrbeginswith(mtype,inext->mtype,&flg);CHKERRQ(ierr);
4347          if (flg) {
4348            if (foundmtype) *foundmtype = PETSC_TRUE;
4349            if (getfactor)  *getfactor  = inext->getfactor[(int)ftype-1];
4350            PetscFunctionReturn(0);
4351          }
4352          inext = inext->next;
4353        }
4354      }
4355      next = next->next;
4356    }
4357  } else {
4358    while (next) {
4359      inext = next->handlers;
4360      while (inext) {
4361        ierr = PetscStrbeginswith(mtype,inext->mtype,&flg);CHKERRQ(ierr);
4362        if (flg && inext->getfactor[(int)ftype-1]) {
4363          if (foundpackage) *foundpackage = PETSC_TRUE;
4364          if (foundmtype)   *foundmtype   = PETSC_TRUE;
4365          if (getfactor)    *getfactor    = inext->getfactor[(int)ftype-1];
4366          PetscFunctionReturn(0);
4367        }
4368        inext = inext->next;
4369      }
4370      next = next->next;
4371    }
4372  }
4373  PetscFunctionReturn(0);
4374}
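
A lookup sketch mirroring how MatGetFactor() below uses this routine; MATSOLVERPETSC ("petsc") and MATSEQAIJ are the built-in solver package and sequential AIJ matrix type:
.vb
  PetscBool foundpackage,foundmtype;
  PetscErrorCode (*getfactor)(Mat,MatFactorType,Mat*);
  ierr = MatSolverTypeGet(MATSOLVERPETSC,MATSEQAIJ,MAT_FACTOR_LU,&foundpackage,&foundmtype,&getfactor);CHKERRQ(ierr);
  if (foundmtype && getfactor) {
    /* an LU factory for MATSEQAIJ is registered by the petsc package */
  }
.ve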
4375
4376PetscErrorCode MatSolverTypeDestroy(void)
4377{
4378  PetscErrorCode              ierr;
4379  MatSolverTypeHolder         next = MatSolverTypeHolders,prev;
4380  MatSolverTypeForSpecifcType inext,iprev;
4381
4382  PetscFunctionBegin;
4383  while (next) {
4384    ierr = PetscFree(next->name);CHKERRQ(ierr);
4385    inext = next->handlers;
4386    while (inext) {
4387      ierr = PetscFree(inext->mtype);CHKERRQ(ierr);
4388      iprev = inext;
4389      inext = inext->next;
4390      ierr = PetscFree(iprev);CHKERRQ(ierr);
4391    }
4392    prev = next;
4393    next = next->next;
4394    ierr = PetscFree(prev);CHKERRQ(ierr);
4395  }
4396  MatSolverTypeHolders = NULL;
4397  PetscFunctionReturn(0);
4398}
4399
4400/*@C
4401 MatGetFactor - Returns a matrix suitable for calls to MatXXFactorSymbolic()
4402
4403 Collective on Mat
4404
4405 Input Parameters:
4406+ mat - the matrix
4407. type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4408- ftype - factor type, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ICC, or MAT_FACTOR_ILU
4409
4410 Output Parameter:
4411. f - the factor matrix used with MatXXFactorSymbolic() calls
4412
4413 Notes:
4414 Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4415 such as pastix, superlu, mumps etc.
4416
4417 PETSc must have been ./configure'd to use the external solver, using the option --download-package
4418
4419 Level: intermediate
4420
4421.seealso: MatCopy(), MatDuplicate(), MatGetFactorAvailable()
4422@*/
4423PetscErrorCode MatGetFactor(Mat mat, MatSolverType type,MatFactorType ftype,Mat *f)
4424{
4425  PetscErrorCode ierr,(*conv)(Mat,MatFactorType,Mat*);
4426  PetscBool      foundpackage,foundmtype;
4427
4428  PetscFunctionBegin;
4429  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4430  PetscValidType(mat,1);
4431
4432  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
4433  MatCheckPreallocated(mat,1);
4434
4435  ierr = MatSolverTypeGet(type,((PetscObject)mat)->type_name,ftype,&foundpackage,&foundmtype,&conv);CHKERRQ(ierr);
4436  if (!foundpackage) {
4437    if (type) {
4438      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_MISSING_FACTOR,"Could not locate solver package %s. Perhaps you must ./configure with --download-%s",type,type);
4439    } else {
4440      SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_MISSING_FACTOR,"Could not locate a solver package. Perhaps you must ./configure with --download-<package>");
4441    }
4442  }
4443
4444  if (!foundmtype) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_MISSING_FACTOR,"MatSolverType %s does not support matrix type %s",type,((PetscObject)mat)->type_name);
4445  if (!conv) SETERRQ3(PetscObjectComm((PetscObject)mat),PETSC_ERR_MISSING_FACTOR,"MatSolverType %s does not support factorization type %s for matrix type %s",type,MatFactorTypes[ftype],((PetscObject)mat)->type_name);
4446
4447#if defined(PETSC_USE_COMPLEX)
4448  if (mat->hermitian && !mat->symmetric && (ftype == MAT_FACTOR_CHOLESKY||ftype == MAT_FACTOR_ICC)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hermitian CHOLESKY or ICC Factor is not supported");
4449#endif
4450
4451  ierr = (*conv)(mat,ftype,f);CHKERRQ(ierr);
4452  PetscFunctionReturn(0);
4453}
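
The factor matrix returned here is typically used as in the following sketch, assuming an assembled square matrix A and vectors b and x with compatible layouts:
.vb
  Mat           F;
  IS            rowperm,colperm;
  MatFactorInfo info;
  ierr = MatGetFactor(A,MATSOLVERPETSC,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
  ierr = MatGetOrdering(A,MATORDERINGND,&rowperm,&colperm);CHKERRQ(ierr);
  ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
  ierr = MatLUFactorSymbolic(F,A,rowperm,colperm,&info);CHKERRQ(ierr);
  ierr = MatLUFactorNumeric(F,A,&info);CHKERRQ(ierr);
  ierr = MatSolve(F,b,x);CHKERRQ(ierr);
.ve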
4454
4455/*@C
4456 MatGetFactorAvailable - Returns a flag indicating whether the matrix supports a particular solver package and factor type
4457
4458 Not Collective
4459
4460 Input Parameters:
4461+ mat - the matrix
4462. type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4463- ftype - factor type, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ICC, or MAT_FACTOR_ILU
4464
4465 Output Parameter:
4466. flg - PETSC_TRUE if the factorization is available
4467
4468 Notes:
4469 Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4470 such as pastix, superlu, mumps etc.
4471
4472 PETSc must have been ./configure'd to use the external solver, using the option --download-package
4473
4474 Level: intermediate
4475
4476.seealso: MatCopy(), MatDuplicate(), MatGetFactor()
4477@*/
4478PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type,MatFactorType ftype,PetscBool *flg)
4479{
4480  PetscErrorCode ierr, (*gconv)(Mat,MatFactorType,Mat*);
4481
4482  PetscFunctionBegin;
4483  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4484  PetscValidType(mat,1);
4485
4486  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
4487  MatCheckPreallocated(mat,1);
4488
4489  *flg = PETSC_FALSE;
4490  ierr = MatSolverTypeGet(type,((PetscObject)mat)->type_name,ftype,NULL,NULL,&gconv);CHKERRQ(ierr);
4491  if (gconv) {
4492    *flg = PETSC_TRUE;
4493  }
4494  PetscFunctionReturn(0);
4495}
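
A guard sketch: check availability before committing to an external package (superlu here, present only if PETSc was configured with --download-superlu), falling back to the built-in solver otherwise:
.vb
  PetscBool flg;
  ierr = MatGetFactorAvailable(A,MATSOLVERSUPERLU,MAT_FACTOR_LU,&flg);CHKERRQ(ierr);
  ierr = MatGetFactor(A,flg ? MATSOLVERSUPERLU : MATSOLVERPETSC,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
.ve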
4496
4497#include <petscdmtypes.h>
4498
4499/*@
4500 MatDuplicate - Duplicates a matrix including the non-zero structure.
4501
4502 Collective on Mat
4503
4504 Input Parameters:
4505+ mat - the matrix
4506- op - One of MAT_DO_NOT_COPY_VALUES, MAT_COPY_VALUES, or MAT_SHARE_NONZERO_PATTERN.
4507 See the manual page for MatDuplicateOption for an explanation of these options.
4508
4509 Output Parameter:
4510. M - pointer to place new matrix
4511
4512 Level: intermediate
4513
4514 Notes:
4515 You cannot change the nonzero pattern for the parent or child matrix if you use MAT_SHARE_NONZERO_PATTERN.
4516 When the original mat is the product of a matrix operation, e.g., an output of MatMatMult() or MatCreateSubMatrix(), only the simple matrix data structure of mat is duplicated; the internal data structures created for reusing previous matrix operations are not. Users should therefore not use MatDuplicate() to create a new matrix M if M is intended to be reused as the product of a matrix operation.
4517
4518.seealso: MatCopy(), MatConvert(), MatDuplicateOption
4519@*/
4520PetscErrorCode MatDuplicate(Mat mat,MatDuplicateOption op,Mat *M)
4521{
4522  PetscErrorCode ierr;
4523  Mat            B;
4524  PetscInt       i;
4525  DM             dm;
4526  void           (*viewf)(void);
4527
4528  PetscFunctionBegin;
4529  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4530  PetscValidType(mat,1);
4531  PetscValidPointer(M,3);
4532  if (op == MAT_COPY_VALUES && !mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"MAT_COPY_VALUES not allowed for unassembled matrix");
4533  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
4534  MatCheckPreallocated(mat,1);
4535
4536  *M = 0;
4537  if (!mat->ops->duplicate) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Not written for this matrix type");
4538  ierr = PetscLogEventBegin(MAT_Convert,mat,0,0,0);CHKERRQ(ierr);
4539  ierr = (*mat->ops->duplicate)(mat,op,M);CHKERRQ(ierr);
4540  B = *M;
4541
4542  ierr = MatGetOperation(mat,MATOP_VIEW,&viewf);CHKERRQ(ierr);
4543  if (viewf) {
4544    ierr = MatSetOperation(B,MATOP_VIEW,viewf);CHKERRQ(ierr);
4545  }
4546
4547  B->stencil.dim = mat->stencil.dim;
4548  B->stencil.noc = mat->stencil.noc;
4549  for (i=0; i<=mat->stencil.dim; i++) {
4550    B->stencil.dims[i]   = mat->stencil.dims[i];
4551    B->stencil.starts[i] = mat->stencil.starts[i];
4552  }
4553
4554  B->nooffproczerorows = mat->nooffproczerorows;
4555  B->nooffprocentries  = mat->nooffprocentries;
4556
4557  ierr = PetscObjectQuery((PetscObject) mat, "__PETSc_dm", (PetscObject*) &dm);CHKERRQ(ierr);
4558  if (dm) {
4559    ierr = PetscObjectCompose((PetscObject) B, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
4560  }
4561  ierr = PetscLogEventEnd(MAT_Convert,mat,0,0,0);CHKERRQ(ierr);
4562  ierr = PetscObjectStateIncrease((PetscObject)B);CHKERRQ(ierr);
4563  PetscFunctionReturn(0);
4564}
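
A minimal sketch of the common duplicate-then-modify pattern, assuming an assembled matrix A:
.vb
  Mat B;
  ierr = MatDuplicate(A,MAT_COPY_VALUES,&B);CHKERRQ(ierr);    /* B gets A's nonzero pattern and values */
  ierr = MatAXPY(B,2.0,A,SAME_NONZERO_PATTERN);CHKERRQ(ierr); /* B = B + 2*A; patterns match by construction */
  ierr = MatDestroy(&B);CHKERRQ(ierr);
.ve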
4565
4566/*@
4567 MatGetDiagonal - Gets the diagonal of a matrix.
4568
4569 Logically Collective on Mat
4570
4571 Input Parameters:
4572+ mat - the matrix
4573- v - the vector for storing the diagonal
4574
4575 Output Parameter:
4576. v - the diagonal of the matrix
4577
4578 Level: intermediate
4579
4580 Note:
4581 Currently only correct in parallel for square matrices.
4582
4583.seealso: MatGetRow(), MatCreateSubMatrices(), MatCreateSubMatrix(), MatGetRowMaxAbs()
4584@*/
4585PetscErrorCode MatGetDiagonal(Mat mat,Vec v)
4586{
4587  PetscErrorCode ierr;
4588
4589  PetscFunctionBegin;
4590  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4591  PetscValidType(mat,1);
4592  PetscValidHeaderSpecific(v,VEC_CLASSID,2);
4593  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4594  if (!mat->ops->getdiagonal) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
4595  MatCheckPreallocated(mat,1);
4596
4597  ierr = (*mat->ops->getdiagonal)(mat,v);CHKERRQ(ierr);
4598  ierr = PetscObjectStateIncrease((PetscObject)v);CHKERRQ(ierr);
4599  PetscFunctionReturn(0);
4600}
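
A typical use is building a Jacobi-style scaling; a sketch assuming an assembled matrix A with no zero diagonal entries:
.vb
  Vec d;
  ierr = MatCreateVecs(A,NULL,&d);CHKERRQ(ierr); /* vector with A's row layout */
  ierr = MatGetDiagonal(A,d);CHKERRQ(ierr);
  ierr = VecReciprocal(d);CHKERRQ(ierr);         /* d[i] = 1/A[i][i] */
  ierr = VecDestroy(&d);CHKERRQ(ierr);
.ve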
4601
4602/*@C
4603 MatGetRowMin - Gets the minimum value (of the real part) of each
4604 row of the matrix
4605
4606 Logically Collective on Mat
4607
4608 Input Parameter:
4609. mat - the matrix
4610
4611 Output Parameters:
4612+ v - the vector for storing the minimums
4613- idx - the indices of the column found for each row (optional)
4614
4615 Level: intermediate
4616
4617 Notes:
4618 The results of this call are the same as if one converted the matrix to dense format
4619 and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
4620
4621 This code is only implemented for a couple of matrix formats.
4622
4623.seealso: MatGetDiagonal(), MatCreateSubMatrices(), MatCreateSubMatrix(), MatGetRowMaxAbs(),
4624 MatGetRowMax()
4625@*/
4626PetscErrorCode MatGetRowMin(Mat mat,Vec v,PetscInt idx[])
4627{
4628 PetscErrorCode ierr;
4629
4630 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4630; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4631 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),4631,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),4631,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),4631,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4631,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4632 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),4632,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
4633 PetscValidHeaderSpecific(v,VEC_CLASSID,2)do { if (!v) return PetscError(((MPI_Comm)0x44000001),4633,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(v,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),4633,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(v))->classid != VEC_CLASSID) { if (
((PetscObject)(v))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),4633,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),4633,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
4634 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),4634,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
4635 if (!mat->ops->getrowmax) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(((MPI_Comm)0x44000001),4635,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,56,PETSC_ERROR_INITIAL,"Mat type %s",((PetscObject)mat)->
type_name)
;
4636 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),4636,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
4637
4638 ierr = (*mat->ops->getrowmin)(mat,v,idx);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4638,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4639 ierr = PetscObjectStateIncrease((PetscObject)v)(((PetscObject)v)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4639,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4640 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4641}
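
 Example of usage, a minimal sketch assuming an assembled matrix A whose type implements this operation:
.vb
   Vec      rmin;
   PetscInt m,*loc;
   MatGetLocalSize(A,&m,NULL);
   MatCreateVecs(A,NULL,&rmin);    /* left-type vector: one entry per local row */
   PetscMalloc1(m,&loc);
   MatGetRowMin(A,rmin,loc);       /* loc[i] = column of the minimum in row i */
   PetscFree(loc);
   VecDestroy(&rmin);
.ve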
4642
4643/*@C
4644 MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
4645 row of the matrix
4646
4647 Logically Collective on Mat
4648
4649 Input Parameter:
4650. mat - the matrix
4651
4652 Output Parameters:
4653+ v - the vector for storing the minimums
4654- idx - the indices of the column found for each row (or NULL if not needed)
4655
4656 Level: intermediate
4657
4658 Notes:
4659 If a row is completely empty or contains only 0.0 values, then the idx[] value for that
4660 row is 0 (the first column).
4661
4662 This code is only implemented for a couple of matrix formats.
4663
4664.seealso: MatGetDiagonal(), MatCreateSubMatrices(), MatCreateSubMatrix(), MatGetRowMax(), MatGetRowMaxAbs(), MatGetRowMin()
4665@*/
4666PetscErrorCode MatGetRowMinAbs(Mat mat,Vec v,PetscInt idx[])
4667{
4668 PetscErrorCode ierr;
4669
4670  PetscFunctionBegin;
4671  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4672  PetscValidType(mat,1);
4673  PetscValidHeaderSpecific(v,VEC_CLASSID,2);
4674  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4675  if (!mat->ops->getrowminabs) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
4676  MatCheckPreallocated(mat,1);
4677  if (idx) {ierr = PetscArrayzero(idx,mat->rmap->n);CHKERRQ(ierr);}
4678
4679  ierr = (*mat->ops->getrowminabs)(mat,v,idx);CHKERRQ(ierr);
4680  ierr = PetscObjectStateIncrease((PetscObject)v);CHKERRQ(ierr);
4681  PetscFunctionReturn(0);
4682}
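
 Example of usage, a minimal sketch assuming an assembled matrix A; pass NULL for idx when the column locations are not needed:
.vb
   Vec rminabs;
   MatCreateVecs(A,NULL,&rminabs);
   MatGetRowMinAbs(A,rminabs,NULL);
   VecDestroy(&rminabs);
.ve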
4683
4684/*@C
4685 MatGetRowMax - Gets the maximum value (of the real part) of each
4686 row of the matrix
4687
4688 Logically Collective on Mat
4689
4690 Input Parameter:
4691. mat - the matrix
4692
4693 Output Parameters:
4694+ v - the vector for storing the maximums
4695- idx - the indices of the column found for each row (or NULL if not needed)
4696
4697 Level: intermediate
4698
4699 Notes:
4700 The results of this call are the same as if one converted the matrix to dense format
4701 and found the maximum value in each row (i.e. the implicit zeros are counted as zeros).
4702
4703 This code is only implemented for a couple of matrix formats.
4704
4705.seealso: MatGetDiagonal(), MatCreateSubMatrices(), MatCreateSubMatrix(), MatGetRowMaxAbs(), MatGetRowMin()
4706@*/
4707PetscErrorCode MatGetRowMax(Mat mat,Vec v,PetscInt idx[])
4708{
4709 PetscErrorCode ierr;
4710
4711  PetscFunctionBegin;
4712  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4713  PetscValidType(mat,1);
4714  PetscValidHeaderSpecific(v,VEC_CLASSID,2);
4715  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4716  if (!mat->ops->getrowmax) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
4717  MatCheckPreallocated(mat,1);
4718
4719  ierr = (*mat->ops->getrowmax)(mat,v,idx);CHKERRQ(ierr);
4720  ierr = PetscObjectStateIncrease((PetscObject)v);CHKERRQ(ierr);
4721  PetscFunctionReturn(0);
4722}
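
 Example of usage, a minimal sketch assuming an assembled matrix A; idx receives the column where each row's maximum was found:
.vb
   Vec      rmax;
   PetscInt m,*col;
   MatGetLocalSize(A,&m,NULL);
   MatCreateVecs(A,NULL,&rmax);
   PetscMalloc1(m,&col);
   MatGetRowMax(A,rmax,col);
   PetscFree(col);
   VecDestroy(&rmax);
.ve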
4723
4724/*@C
4725 MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
4726 row of the matrix
4727
4728 Logically Collective on Mat
4729
4730 Input Parameter:
4731. mat - the matrix
4732
4733 Output Parameters:
4734+ v - the vector for storing the maximums
4735- idx - the indices of the column found for each row (or NULL if not needed)
4736
4737 Level: intermediate
4738
4739 Notes:
4740 If a row is completely empty or contains only 0.0 values, then the idx[] value for that
4741 row is 0 (the first column).
4742
4743 This code is only implemented for a couple of matrix formats.
4744
4745.seealso: MatGetDiagonal(), MatCreateSubMatrices(), MatCreateSubMatrix(), MatGetRowMax(), MatGetRowMin()
4746@*/
4747PetscErrorCode MatGetRowMaxAbs(Mat mat,Vec v,PetscInt idx[])
4748{
4749 PetscErrorCode ierr;
4750
4751  PetscFunctionBegin;
4752  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4753  PetscValidType(mat,1);
4754  PetscValidHeaderSpecific(v,VEC_CLASSID,2);
4755  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4756  if (!mat->ops->getrowmaxabs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
4757  MatCheckPreallocated(mat,1);
4758  if (idx) {ierr = PetscArrayzero(idx,mat->rmap->n);CHKERRQ(ierr);}
4759
4760  ierr = (*mat->ops->getrowmaxabs)(mat,v,idx);CHKERRQ(ierr);
4761  ierr = PetscObjectStateIncrease((PetscObject)v);CHKERRQ(ierr);
4762  PetscFunctionReturn(0);
4763}
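
 Example of usage, a sketch of row equilibration (scaling each row by the inverse of its largest absolute entry); it assumes A is assembled and has no zero rows, since VecReciprocal() would otherwise divide by zero:
.vb
   Vec scale;
   MatCreateVecs(A,NULL,&scale);
   MatGetRowMaxAbs(A,scale,NULL);
   VecReciprocal(scale);
   MatDiagonalScale(A,scale,NULL);  /* rows of A now have unit maximum magnitude */
   VecDestroy(&scale);
.ve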
4764
4765/*@
4766 MatGetRowSum - Gets the sum of each row of the matrix
4767
4768 Logically or Neighborhood Collective on Mat
4769
4770 Input Parameter:
4771. mat - the matrix
4772
4773 Output Parameter:
4774. v - the vector for storing the row sums
4775
4776 Level: intermediate
4777
4778 Notes:
4779 This code is slow since it is not currently specialized for different formats
4780
4781.seealso: MatGetDiagonal(), MatCreateSubMatrices(), MatCreateSubMatrix(), MatGetRowMax(), MatGetRowMin()
4782@*/
4783PetscErrorCode MatGetRowSum(Mat mat, Vec v)
4784{
4785 Vec ones;
4786 PetscErrorCode ierr;
4787
4788  PetscFunctionBegin;
4789  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4790  PetscValidType(mat,1);
4791  PetscValidHeaderSpecific(v,VEC_CLASSID,2);
4792  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4793  MatCheckPreallocated(mat,1);
4794  ierr = MatCreateVecs(mat,&ones,NULL);CHKERRQ(ierr);
4795  ierr = VecSet(ones,1.);CHKERRQ(ierr);
4796  ierr = MatMult(mat,ones,v);CHKERRQ(ierr);
4797  ierr = VecDestroy(&ones);CHKERRQ(ierr);
4798  PetscFunctionReturn(0);
4799}
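
 Example of usage, a minimal sketch; as the body above shows, the call is equivalent to multiplying mat by a vector of ones:
.vb
   Vec rowsum;
   MatCreateVecs(A,NULL,&rowsum);  /* left-type vector: one entry per row */
   MatGetRowSum(A,rowsum);         /* rowsum = A*[1,1,...,1]^T */
   VecDestroy(&rowsum);
.ve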
4800
4801/*@
4802 MatTranspose - Computes an in-place or out-of-place transpose of a matrix.
4803
4804 Collective on Mat
4805
4806 Input Parameters:
4807+ mat - the matrix to transpose
4808- reuse - either MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX, or MAT_INPLACE_MATRIX
4809
4810 Output Parameter:
4811. B - the transpose
4812
4813 Notes:
4814 If you use MAT_INPLACE_MATRIX then you must pass in &mat for B
4815
4816 MAT_REUSE_MATRIX reuses the B matrix created by a previous call to this function with MAT_INITIAL_MATRIX
4817
4818 Consider using MatCreateTranspose() instead if you only need a matrix that behaves like the transpose, but don't need the storage to be changed.
4819
4820 Level: intermediate
4821
4822.seealso: MatMultTranspose(), MatMultTransposeAdd(), MatIsTranspose(), MatReuse
4823@*/
4824PetscErrorCode MatTranspose(Mat mat,MatReuse reuse,Mat *B)
4825{
4826 PetscErrorCode ierr;
4827
4828  PetscFunctionBegin;
4829  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4830  PetscValidType(mat,1);
4831  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4832  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
4833  if (!mat->ops->transpose) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
4834  if (reuse == MAT_INPLACE_MATRIX && mat != *B) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"MAT_INPLACE_MATRIX requires last matrix to match first");
4835  if (reuse == MAT_REUSE_MATRIX && mat == *B) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Perhaps you mean MAT_INPLACE_MATRIX");
4836  MatCheckPreallocated(mat,1);
4837
4838  ierr = PetscLogEventBegin(MAT_Transpose,mat,0,0,0);CHKERRQ(ierr);
4839  ierr = (*mat->ops->transpose)(mat,reuse,B);CHKERRQ(ierr);
4840  ierr = PetscLogEventEnd(MAT_Transpose,mat,0,0,0);CHKERRQ(ierr);
4841  if (B) {ierr = PetscObjectStateIncrease((PetscObject)*B);CHKERRQ(ierr);}
4842  PetscFunctionReturn(0);
4843}
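
 Example of usage, a sketch of the three MatReuse modes described in the notes above (assumes an assembled matrix A and a square matrix B):
.vb
   Mat At;
   MatTranspose(A,MAT_INITIAL_MATRIX,&At);  /* create At = A^T */
   /* ... change the numerical values of A, keeping its nonzero pattern ... */
   MatTranspose(A,MAT_REUSE_MATRIX,&At);    /* refill the existing At */
   MatTranspose(B,MAT_INPLACE_MATRIX,&B);   /* overwrite B with B^T */
   MatDestroy(&At);
.ve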
4844
4845/*@
4846 MatIsTranspose - Test whether a matrix is another one's transpose,
4847 or its own, in which case it tests symmetry.
4848
4849 Collective on Mat
4850
4851 Input Parameters:
4852+ A - the matrix to test
4853- B - the matrix to test against, this can equal the first parameter; entries that differ by less than tol are treated as equal
4854
4855 Output Parameter:
4856. flg - the result
4857
4858 Notes:
4859 Only available for SeqAIJ/MPIAIJ matrices. The sequential algorithm
4860 has a running time of the order of the number of nonzeros; the parallel
4861 test involves parallel copies of the block-offdiagonal parts of the matrix.
4862
4863 Level: intermediate
4864
4865.seealso: MatTranspose(), MatIsSymmetric(), MatIsHermitian()
4866@*/
4867PetscErrorCode MatIsTranspose(Mat A,Mat B,PetscReal tol,PetscBool *flg)
4868{
4869 PetscErrorCode ierr,(*f)(Mat,Mat,PetscReal,PetscBool*),(*g)(Mat,Mat,PetscReal,PetscBool*);
4870
4871  PetscFunctionBegin;
4872  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
4873  PetscValidHeaderSpecific(B,MAT_CLASSID,2);
4874  PetscValidPointer(flg,3);
4875  ierr = PetscObjectQueryFunction((PetscObject)A,"MatIsTranspose_C",&f);CHKERRQ(ierr);
4876  ierr = PetscObjectQueryFunction((PetscObject)B,"MatIsTranspose_C",&g);CHKERRQ(ierr);
4877  *flg = PETSC_FALSE;
4878  if (f && g) {
4879    if (f == g) {
4880      ierr = (*f)(A,B,tol,flg);CHKERRQ(ierr);
4881    } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_NOTSAMETYPE,"Matrices do not have the same comparator for symmetry test");
4882  } else {
4883    MatType mattype;
4884    if (!f) {
4885      ierr = MatGetType(A,&mattype);CHKERRQ(ierr);
4886    } else {
4887      ierr = MatGetType(B,&mattype);CHKERRQ(ierr);
4888    }
4889    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix of type <%s> does not support checking for transpose",mattype);
4890  }
4891  PetscFunctionReturn(0);
4892}
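
 Example of usage, a minimal sketch; passing the same matrix twice tests symmetry, per the description above:
.vb
   PetscBool symm;
   MatIsTranspose(A,A,1.e-10,&symm);
   if (symm) MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);
.ve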
4893
4894/*@
4895 MatHermitianTranspose - Computes the Hermitian (conjugate) transpose of a matrix, either in-place or out-of-place.
4896
4897 Collective on Mat
4898
4899 Input Parameters:
4900+ mat - the matrix to transpose and complex conjugate
4901- reuse - MAT_INITIAL_MATRIX to create a new matrix, MAT_INPLACE_MATRIX to reuse the first argument to store the transpose
4902
4903 Output Parameter:
4904. B - the Hermitian transpose
4905
4906 Level: intermediate
4907
4908.seealso: MatTranspose(), MatMultTranspose(), MatMultTransposeAdd(), MatIsTranspose(), MatReuse
4909@*/
4910PetscErrorCode MatHermitianTranspose(Mat mat,MatReuse reuse,Mat *B)
4911{
4912 PetscErrorCode ierr;
4913
4914  PetscFunctionBegin;
4915  ierr = MatTranspose(mat,reuse,B);CHKERRQ(ierr);
4916#if defined(PETSC_USE_COMPLEX)
4917  ierr = MatConjugate(*B);CHKERRQ(ierr);
4918#endif
4919  PetscFunctionReturn(0);
4920}
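
 Example of usage, a minimal sketch; with real scalars this reduces to MatTranspose(), since the conjugation at line 4917 is compiled only when PETSC_USE_COMPLEX is defined:
.vb
   Mat Ah;
   MatHermitianTranspose(A,MAT_INITIAL_MATRIX,&Ah);  /* Ah = conj(A)^T */
   MatDestroy(&Ah);
.ve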
4921
4922/*@
4923 MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose.
4924
4925 Collective on Mat
4926
4927 Input Parameters:
4928+ A - the matrix to test
4929- B - the matrix to test against, this can equal the first parameter; entries that differ by less than tol are treated as equal
4930
4931 Output Parameter:
4932. flg - the result
4933
4934 Notes:
4935 Only available for SeqAIJ/MPIAIJ matrices. The sequential algorithm
4936 has a running time of the order of the number of nonzeros; the parallel
4937 test involves parallel copies of the block-offdiagonal parts of the matrix.
4938
4939 Level: intermediate
4940
4941.seealso: MatTranspose(), MatIsSymmetric(), MatIsHermitian(), MatIsTranspose()
4942@*/
4943PetscErrorCode MatIsHermitianTranspose(Mat A,Mat B,PetscReal tol,PetscBool *flg)
4944{
4945 PetscErrorCode ierr,(*f)(Mat,Mat,PetscReal,PetscBool*),(*g)(Mat,Mat,PetscReal,PetscBool*);
4946
4947  PetscFunctionBegin;
4948  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
4949  PetscValidHeaderSpecific(B,MAT_CLASSID,2);
4950  PetscValidPointer(flg,3); *flg = PETSC_FALSE; /* ensure flg is defined even when no comparator is found */
4951  ierr = PetscObjectQueryFunction((PetscObject)A,"MatIsHermitianTranspose_C",&f);CHKERRQ(ierr);
4952  ierr = PetscObjectQueryFunction((PetscObject)B,"MatIsHermitianTranspose_C",&g);CHKERRQ(ierr);
4953  if (f && g) {
4954    if (f == g) {
4955      ierr = (*f)(A,B,tol,flg);CHKERRQ(ierr);
4956    } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_NOTSAMETYPE,"Matrices do not have the same comparator for Hermitian test");
4957  }
4958  PetscFunctionReturn(0);
4959}
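
 Example of usage, a minimal sketch; comparing A with itself tests whether A is Hermitian:
.vb
   PetscBool herm;
   MatIsHermitianTranspose(A,A,1.e-10,&herm);
.ve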
4960
4961/*@
4962 MatPermute - Creates a new matrix with rows and columns permuted from the
4963 original.
4964
4965 Collective on Mat
4966
4967 Input Parameters:
4968+ mat - the matrix to permute
4969. row - row permutation, each processor supplies only the permutation for its rows
4970- col - column permutation, each processor supplies only the permutation for its columns
4971
4972 Output Parameter:
4973. B - the permuted matrix
4974
4975 Level: advanced
4976
4977 Note:
4978 The index sets map from row/col of permuted matrix to row/col of original matrix.
4979 The index sets should be on the same communicator as Mat and have the same local sizes.
4980
4981.seealso: MatGetOrdering(), ISAllGather()
4982
4983@*/
4984PetscErrorCode MatPermute(Mat mat,IS row,IS col,Mat *B)
4985{
4986 PetscErrorCode ierr;
4987
4988  PetscFunctionBegin;
4989  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4990  PetscValidType(mat,1);
4991  PetscValidHeaderSpecific(row,IS_CLASSID,2);
4992  PetscValidHeaderSpecific(col,IS_CLASSID,3);
4993  PetscValidPointer(B,4);
4994  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4995  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
4996  if (!mat->ops->permute) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatPermute not available for Mat type %s",((PetscObject)mat)->type_name);
4997  MatCheckPreallocated(mat,1);
4998
4999  ierr = (*mat->ops->permute)(mat,row,col,B);CHKERRQ(ierr);
5000  ierr = PetscObjectStateIncrease((PetscObject)*B);CHKERRQ(ierr);
5001  PetscFunctionReturn(0);
5002}
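
 Example of usage, a sketch combining MatPermute() with MatGetOrdering() (see the .seealso list); it assumes an assembled matrix A of a type that supports both operations:
.vb
   IS  rperm,cperm;
   Mat Ap;
   MatGetOrdering(A,MATORDERINGRCM,&rperm,&cperm);
   MatPermute(A,rperm,cperm,&Ap);   /* Ap = A with RCM-reordered rows and columns */
   ISDestroy(&rperm);
   ISDestroy(&cperm);
   MatDestroy(&Ap);
.ve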
5003
5004/*@
5005 MatEqual - Compares two matrices.
5006
5007 Collective on Mat
5008
5009 Input Parameters:
5010+ A - the first matrix
5011- B - the second matrix
5012
5013 Output Parameter:
5014. flg - PETSC_TRUE if the matrices are equal; PETSC_FALSE otherwise.
5015
5016 Level: intermediate
5017
5018@*/
5019PetscErrorCode MatEqual(Mat A,Mat B,PetscBool *flg)
5020{
5021 PetscErrorCode ierr;
5022
5023  PetscFunctionBegin;
5024  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
5025  PetscValidHeaderSpecific(B,MAT_CLASSID,2);
5026  PetscValidType(A,1);
5027  PetscValidType(B,2);
5028  PetscValidIntPointer(flg,3);
5029  PetscCheckSameComm(A,1,B,2);
5030  MatCheckPreallocated(B,2);
5031  if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
5032  if (!B->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
5033  if (A->rmap->N != B->rmap->N || A->cmap->N != B->cmap->N) SETERRQ4(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat B: global dim %D %D %D %D",A->rmap->N,B->rmap->N,A->cmap->N,B->cmap->N);
5034  if (!A->ops->equal) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type %s",((PetscObject)A)->type_name);
5035  if (!B->ops->equal) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type %s",((PetscObject)B)->type_name);
5036  if (A->ops->equal != B->ops->equal) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"A is type: %s\nB is type: %s",((PetscObject)A)->type_name,((PetscObject)B)->type_name);
5037  MatCheckPreallocated(A,1);
5038
5039  ierr = (*A->ops->equal)(A,B,flg);CHKERRQ(ierr);
5040  PetscFunctionReturn(0);
5041}
5042
5043/*@
5044 MatDiagonalScale - Scales a matrix on the left and right by diagonal
5045 matrices that are stored as vectors. Either of the two scaling
5046 matrices can be NULL.
5047
5048 Collective on Mat
5049
5050 Input Parameters:
5051+ mat - the matrix to be scaled
5052. l - the left scaling vector (or NULL)
5053- r - the right scaling vector (or NULL)
5054
5055 Notes:
5056 MatDiagonalScale() computes A = LAR, where
5057 L is a diagonal matrix (stored as a vector) and R is a diagonal matrix (stored as a vector).
5058 L scales the rows of the matrix and R scales the columns.
5059
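 Example of Usage (a minimal sketch, assuming mat is an assembled Mat and l, r are Vecs with layouts compatible with the rows and columns of mat):
.vb
   MatDiagonalScale(mat,l,NULL);   /* scale the rows of mat by the entries of l */
   MatDiagonalScale(mat,NULL,r);   /* scale the columns of mat by the entries of r */
.ve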
5060 Level: intermediate
5061
5062
5063.seealso: MatScale(), MatShift(), MatDiagonalSet()
5064@*/
5065PetscErrorCode MatDiagonalScale(Mat mat,Vec l,Vec r)
5066{
5067 PetscErrorCode ierr;
5068
5069 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5069; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5070 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5070,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5070,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5070,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5070,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5071 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5071,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5072 if (!mat->ops->diagonalscale) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),5072,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
5073 if (l) {PetscValidHeaderSpecific(l,VEC_CLASSID,2)do { if (!l) return PetscError(((MPI_Comm)0x44000001),5073,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(l,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),5073,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(l))->classid != VEC_CLASSID) { if (
((PetscObject)(l))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),5073,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),5073,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;PetscCheckSameComm(mat,1,l,2)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)l),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),5073,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),5073,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,__flag); } while (0)
;}
5074 if (r) {PetscValidHeaderSpecific(r,VEC_CLASSID,3)do { if (!r) return PetscError(((MPI_Comm)0x44000001),5074,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(r,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),5074,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(r))->classid != VEC_CLASSID) { if (
((PetscObject)(r))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),5074,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),5074,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;PetscCheckSameComm(mat,1,r,3)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)r),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),5074,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),5074,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,3,__flag); } while (0)
;}
5075 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),5075,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
5076 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),5076,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
5077 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),5077,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
5078
5079 ierr = PetscLogEventBegin(MAT_Scale,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Scale].active) ? (*PetscLogPLB)((MAT_Scale),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5079,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5080 ierr = (*mat->ops->diagonalscale)(mat,l,r);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5080,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5081 ierr = PetscLogEventEnd(MAT_Scale,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Scale].active) ? (*PetscLogPLE)((MAT_Scale),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5081,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5082 ierr = PetscObjectStateIncrease((PetscObject)mat)(((PetscObject)mat)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5082,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5083#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
5084 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
5085 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
5086 }
5087#endif
5088 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5089}
5090
5091/*@
5092 MatScale - Scales all elements of a matrix by a given number.
5093
5094 Logically Collective on Mat
5095
5096 Input Parameters:
5097+ mat - the matrix to be scaled
5098- a - the scaling value
5099
5100 Output Parameter:
5101. mat - the scaled matrix
5102
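 Example of Usage (a minimal sketch, assuming mat is an assembled Mat):
.vb
   MatScale(mat,2.0);   /* replaces mat by 2.0*mat */
.ve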
5103 Level: intermediate
5104
5105.seealso: MatDiagonalScale()
5106@*/
5107PetscErrorCode MatScale(Mat mat,PetscScalar a)
5108{
5109 PetscErrorCode ierr;
5110
5111 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5111; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5112 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5112,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5112,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5112,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5112,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5113 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5113,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5114 if (a != (PetscScalar)1.0 && !mat->ops->scale) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(((MPI_Comm)0x44000001),5114,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,56,PETSC_ERROR_INITIAL,"Mat type %s",((PetscObject)mat)->
type_name)
;
5115 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),5115,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
5116 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),5116,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
5117 PetscValidLogicalCollectiveScalar(mat,a,2)do { PetscErrorCode _7_ierr; PetscReal b1[5],b2[5]; if (PetscIsNanScalar
(a)) {b1[4] = 1;} else {b1[4] = 0;}; b1[0] = -(a); b1[1] = (a
);b1[2] = -((PetscReal)0.); b1[3] = ((PetscReal)0.); _7_ierr =
((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)mat))),0) || MPI_Allreduce((b1),(b2),(5),(((MPI_Datatype
)0x4c00080b)),((MPI_Op)(0x58000001)),(PetscObjectComm((PetscObject
)mat))));do {if (__builtin_expect(!!(_7_ierr),0)) return PetscError
(((MPI_Comm)0x44000001),5117,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (!(b2[4] >
0) && !(PetscEqualReal(-b2[0],b2[1]) && PetscEqualReal
(-b2[2],b2[3]))) return PetscError(PetscObjectComm((PetscObject
)mat),5117,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,62,PETSC_ERROR_INITIAL,"Scalar value must be same on all processes, argument # %d"
,2); } while (0)
;
5118 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),5118,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
5119
5120 ierr = PetscLogEventBegin(MAT_Scale,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Scale].active) ? (*PetscLogPLB)((MAT_Scale),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5120,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5121 if (a != (PetscScalar)1.0) {
5122 ierr = (*mat->ops->scale)(mat,a);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5122,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5123 ierr = PetscObjectStateIncrease((PetscObject)mat)(((PetscObject)mat)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5123,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5124#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
5125 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
5126 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
5127 }
5128#endif
5129 }
5130 ierr = PetscLogEventEnd(MAT_Scale,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Scale].active) ? (*PetscLogPLE)((MAT_Scale),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5130,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5131 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5132}
5133
5134/*@
5135 MatNorm - Calculates various norms of a matrix.
5136
5137 Collective on Mat
5138
5139 Input Parameters:
5140+ mat - the matrix
5141- type - the type of norm, NORM_1, NORM_FROBENIUS, NORM_INFINITY
5142
5143 Output Parameter:
5144. nrm - the resulting norm
5145
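 Example of Usage (a minimal sketch, assuming mat is an assembled Mat):
.vb
   PetscReal nrm;
   MatNorm(mat,NORM_FROBENIUS,&nrm);   /* nrm now holds the Frobenius norm of mat */
.ve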
5146 Level: intermediate
5147
5148@*/
5149PetscErrorCode MatNorm(Mat mat,NormType type,PetscReal *nrm)
5150{
5151 PetscErrorCode ierr;
5152
5153 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5153; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5154 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5154,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5154,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5154,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5154,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5155 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5155,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5156 PetscValidScalarPointer(nrm,3)do { if (!nrm) return PetscError(((MPI_Comm)0x44000001),5156,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(nrm,PETSC_DOUBLE)) return PetscError(((MPI_Comm
)0x44000001),5156,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscScalar: Parameter # %d"
,3); } while (0)
;
5157
5158 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),5158,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
5159 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),5159,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
5160 if (!mat->ops->norm) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),5160,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
5161 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),5161,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
5162
5163 ierr = (*mat->ops->norm)(mat,type,nrm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5163,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5164 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5165}
5166
5167/*
5168 This variable is used to prevent counting of MatAssemblyBegin() calls that
5169 are made from within a MatAssemblyEnd().
5170*/
5171static PetscInt MatAssemblyEnd_InUse = 0;
5172/*@
5173 MatAssemblyBegin - Begins assembling the matrix. This routine should
5174 be called after completing all calls to MatSetValues().
5175
5176 Collective on Mat
5177
5178 Input Parameters:
5179+ mat - the matrix
5180- type - type of assembly, either MAT_FLUSH_ASSEMBLY or MAT_FINAL_ASSEMBLY
5181
5182 Notes:
5183 MatSetValues() generally caches the values. The matrix is ready to
5184 use only after MatAssemblyBegin() and MatAssemblyEnd() have been called.
5185 Use MAT_FLUSH_ASSEMBLY when switching between ADD_VALUES and INSERT_VALUES
5186 in MatSetValues(); use MAT_FINAL_ASSEMBLY for the final assembly before
5187 using the matrix.
5188
5189 ALL processes that share a matrix MUST call MatAssemblyBegin() and MatAssemblyEnd() the SAME NUMBER of times, and each time with the
5190 same flag of MAT_FLUSH_ASSEMBLY or MAT_FINAL_ASSEMBLY for all processes. Thus you CANNOT locally change from ADD_VALUES to INSERT_VALUES; that is
5191 a global collective operation requiring all processes that share the matrix.
5192
5193 Space for preallocated nonzeros that is not filled by a call to MatSetValues() or a related routine is compressed
5194 out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5195 before MAT_FINAL_ASSEMBLY so the space is not compressed out.
5196
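 Example of Usage (a minimal sketch, assuming values have already been set with MatSetValues()):
.vb
   MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
   /* computation that does not touch mat may overlap the assembly communication here */
   MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
.ve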
5197 Level: beginner
5198
5199.seealso: MatAssemblyEnd(), MatSetValues(), MatAssembled()
5200@*/
5201PetscErrorCode MatAssemblyBegin(Mat mat,MatAssemblyType type)
5202{
5203 PetscErrorCode ierr;
5204
5205 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5205; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5206 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5206,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5206,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5206,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5206,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5207 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5207,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5208 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),5208,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
5209 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix.\nDid you forget to call MatSetUnfactored()?")return PetscError(PetscObjectComm((PetscObject)mat),5209,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix.\nDid you forget to call MatSetUnfactored()?"
)
;
5210 if (mat->assembled) {
5211 mat->was_assembled = PETSC_TRUE;
5212 mat->assembled = PETSC_FALSE;
5213 }
5214
5215 if (!MatAssemblyEnd_InUse) {
5216 ierr = PetscLogEventBegin(MAT_AssemblyBegin,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_AssemblyBegin].active) ? (*PetscLogPLB)((MAT_AssemblyBegin
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5216,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5217 if (mat->ops->assemblybegin) {ierr = (*mat->ops->assemblybegin)(mat,type);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5217,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
5218 ierr = PetscLogEventEnd(MAT_AssemblyBegin,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_AssemblyBegin].active) ? (*PetscLogPLE)((MAT_AssemblyBegin
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5218,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5219 } else if (mat->ops->assemblybegin) {
5220 ierr = (*mat->ops->assemblybegin)(mat,type);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5220,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5221 }
5222 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5223}
5224
5225/*@
5226 MatAssembled - Indicates if a matrix has been assembled and is ready for
5227 use; for example, in matrix-vector product.
5228
5229 Not Collective
5230
5231 Input Parameter:
5232. mat - the matrix
5233
5234 Output Parameter:
5235. assembled - PETSC_TRUE or PETSC_FALSE
5236
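 Example of Usage (a minimal sketch, assuming mat is an existing Mat):
.vb
   PetscBool done;
   MatAssembled(mat,&done);
   if (!done) {
     MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
   }
.ve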
5237 Level: advanced
5238
5239.seealso: MatAssemblyEnd(), MatSetValues(), MatAssemblyBegin()
5240@*/
5241PetscErrorCode MatAssembled(Mat mat,PetscBool *assembled)
5242{
5243 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5243; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5244 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5244,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5244,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5244,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5244,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5245 PetscValidPointer(assembled,2)do { if (!assembled) return PetscError(((MPI_Comm)0x44000001)
,5245,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",2); if
(!PetscCheckPointer(assembled,PETSC_CHAR)) return PetscError
(((MPI_Comm)0x44000001),5245,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",2);
} while (0)
;
5246 *assembled = mat->assembled;
5247 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5248}
5249
5250/*@
5251 MatAssemblyEnd - Completes assembling the matrix. This routine should
5252 be called after MatAssemblyBegin().
5253
5254 Collective on Mat
5255
5256 Input Parameters:
5257+ mat - the matrix
5258- type - type of assembly, either MAT_FLUSH_ASSEMBLY or MAT_FINAL_ASSEMBLY
5259
5260 Options Database Keys:
5261+ -mat_view ::ascii_info - Prints info on matrix at conclusion of MatAssemblyEnd()
5262. -mat_view ::ascii_info_detail - Prints more detailed info
5263. -mat_view - Prints matrix in ASCII format
5264. -mat_view ::ascii_matlab - Prints matrix in Matlab format
5265. -mat_view draw - Draws the nonzero structure of the matrix, using MatView() and PetscDrawOpenX().
5266. -display <name> - Sets display name (default is host)
5267. -draw_pause <sec> - Sets number of seconds to pause after display
5268. -mat_view socket - Sends matrix to socket, can be accessed from Matlab (See Users-Manual: ch_matlab )
5269. -viewer_socket_machine <machine> - Machine to use for socket
5270. -viewer_socket_port <port> - Port number to use for socket
5271- -mat_view binary:filename[:append] - Save matrix to file in binary format
5272
5273 Notes:
5274 MatSetValues() generally caches the values. The matrix is ready to
5275 use only after MatAssemblyBegin() and MatAssemblyEnd() have been called.
5276 Use MAT_FLUSH_ASSEMBLY when switching between ADD_VALUES and INSERT_VALUES
5277 in MatSetValues(); use MAT_FINAL_ASSEMBLY for the final assembly before
5278 using the matrix.
5279
5280 Space for preallocated nonzeros that is not filled by a call to MatSetValues() or a related routine is compressed
5281 out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5282 before MAT_FINAL_ASSEMBLY so the space is not compressed out.
5283
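 Example of Usage (a minimal sketch of a flush assembly when switching insert modes; the indices and value here are hypothetical):
.vb
   PetscInt    i = 0, j = 0;
   PetscScalar v = 1.0;
   MatSetValues(mat,1,&i,1,&j,&v,INSERT_VALUES);
   MatAssemblyBegin(mat,MAT_FLUSH_ASSEMBLY);
   MatAssemblyEnd(mat,MAT_FLUSH_ASSEMBLY);
   MatSetValues(mat,1,&i,1,&j,&v,ADD_VALUES);
   MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
   MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
.ve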
5284 Level: beginner
5285
5286.seealso: MatAssemblyBegin(), MatSetValues(), PetscDrawOpenX(), PetscDrawCreate(), MatView(), MatAssembled(), PetscViewerSocketOpen()
5287@*/
5288PetscErrorCode MatAssemblyEnd(Mat mat,MatAssemblyType type)
5289{
5290 PetscErrorCode ierr;
5291 static PetscInt inassm = 0;
5292 PetscBool flg = PETSC_FALSE;
5293
5294 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5294; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5295 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5295,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5295,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5295,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5295,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5296 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5296,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5297
5298 inassm++;
5299 MatAssemblyEnd_InUse++;
5300 if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5301 ierr = PetscLogEventBegin(MAT_AssemblyEnd,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_AssemblyEnd].active) ? (*PetscLogPLB)((MAT_AssemblyEnd),
0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5301,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5302 if (mat->ops->assemblyend) {
5303 ierr = (*mat->ops->assemblyend)(mat,type);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5303,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5304 }
5305 ierr = PetscLogEventEnd(MAT_AssemblyEnd,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_AssemblyEnd].active) ? (*PetscLogPLE)((MAT_AssemblyEnd),
0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5305,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5306 } else if (mat->ops->assemblyend) {
5307 ierr = (*mat->ops->assemblyend)(mat,type);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5307,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5308 }
5309
5310 /* Flush assembly is not a true assembly */
5311 if (type != MAT_FLUSH_ASSEMBLY) {
5312 mat->assembled = PETSC_TRUE;
5313 mat->num_ass++;
5314 mat->ass_nonzerostate = mat->nonzerostate;
5315 }
5316
5317 mat->insertmode = NOT_SET_VALUES;
5318 MatAssemblyEnd_InUse--;
5319 ierr = PetscObjectStateIncrease((PetscObject)mat)(((PetscObject)mat)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5319,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5320 if (!mat->symmetric_eternal) {
5321 mat->symmetric_set = PETSC_FALSE;
5322 mat->hermitian_set = PETSC_FALSE;
5323 mat->structurally_symmetric_set = PETSC_FALSE;
5324 }
5325#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
5326 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
5327 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
5328 }
5329#endif
5330 if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5331 ierr = MatViewFromOptions(mat,NULL((void*)0),"-mat_view");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5331,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5332
5333 if (mat->checksymmetryonassembly) {
5334 ierr = MatIsSymmetric(mat,mat->checksymmetrytol,&flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5334,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5335 if (flg) {
5336 ierr = PetscPrintf(PetscObjectComm((PetscObject)mat),"Matrix is symmetric (tolerance %g)\n",(double)mat->checksymmetrytol);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5336,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5337 } else {
5338 ierr = PetscPrintf(PetscObjectComm((PetscObject)mat),"Matrix is not symmetric (tolerance %g)\n",(double)mat->checksymmetrytol);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5338,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5339 }
5340 }
5341 if (mat->nullsp && mat->checknullspaceonassembly) {
5342 ierr = MatNullSpaceTest(mat->nullsp,mat,NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5342,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5343 }
5344 }
5345 inassm--;
5346 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5347}
5348
5349/*@
5350 MatSetOption - Sets a parameter option for a matrix. Some options
5351 may be specific to certain storage formats. Some options
5352 determine how values will be inserted (or added). Sorted,
5353 row-oriented input will generally assemble the fastest. The default
5354 is row-oriented.
5355
5356 Logically Collective on Mat for certain operations (such as MAT_SPD); not collective for others (such as MAT_ROW_ORIENTED); see MatOption
5357
5358 Input Parameters:
5359+ mat - the matrix
5360. option - the option, one of those listed below (and possibly others),
5361- flg - turn the option on (PETSC_TRUE) or off (PETSC_FALSE)
5362
5363 Options Describing Matrix Structure:
5364+ MAT_SPD - symmetric positive definite
5365. MAT_SYMMETRIC - symmetric in terms of both structure and value
5366. MAT_HERMITIAN - the transpose of the matrix is its complex conjugate
5367. MAT_STRUCTURALLY_SYMMETRIC - symmetric nonzero structure
5368- MAT_SYMMETRY_ETERNAL - if you would like the symmetry/Hermitian flag
5369 you set to be kept with all future use of the matrix,
5370 including after MatAssemblyBegin/End(), which could
5371 potentially change the symmetry structure; i.e. you
5372 KNOW the matrix will ALWAYS have the property you set.
5373
5374
5375 Options For Use with MatSetValues():
5376 Insert a logically dense subblock, which can be
5377. MAT_ROW_ORIENTED - row-oriented (default)
5378
5379 Note these options reflect the data you pass in with MatSetValues(); they have
5380 nothing to do with how the data is stored internally in the matrix
5381 data structure.
5382
5383 When (re)assembling a matrix, we can restrict the input for
5384 efficiency/debugging purposes. These options include:
5385+ MAT_NEW_NONZERO_LOCATIONS - additional insertions will be allowed if they generate a new nonzero (slow)
5386. MAT_NEW_DIAGONALS - new diagonals will be allowed (for block diagonal format only)
5387. MAT_IGNORE_OFF_PROC_ENTRIES - drops off-processor entries
5388. MAT_NEW_NONZERO_LOCATION_ERR - generates an error for new matrix entry
5389. MAT_USE_HASH_TABLE - uses a hash table to speed up matrix assembly
5390. MAT_NO_OFF_PROC_ENTRIES - you know each process will only set values for its own rows; an error will be generated if
5391 any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
5392 performance for very large process counts.
5393- MAT_SUBSET_OFF_PROC_ENTRIES - you know that the first assembly after setting this flag will set a superset
5394 of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5395 functions, instead sending only neighbor messages.
5396
5397 Notes:
5398 Except for MAT_UNUSED_NONZERO_LOCATION_ERR and MAT_ROW_ORIENTED all processes that share the matrix must pass the same value in flg!
5399
5400 Some options are relevant only for particular matrix types and
5401 are thus ignored by others. Other options are not supported by
5402 certain matrix types and will generate an error message if set.
5403
5404 If using a Fortran 77 module to compute a matrix, one may need to
5405 use the column-oriented option (or convert to the row-oriented
5406 format).
5407
5408 MAT_NEW_NONZERO_LOCATIONS set to PETSC_FALSE indicates that any add or insertion
5409 that would generate a new entry in the nonzero structure is instead
5410 ignored. Thus, if memory has not already been allocated for this particular
5411 data, then the insertion is ignored. For dense matrices, in which
5412 the entire array is allocated, no entries are ever ignored.
5413 Set after the first MatAssemblyEnd(). If this option is set, MatAssemblyBegin/End() requires one less global reduction
5414
5415 MAT_NEW_NONZERO_LOCATION_ERR set to PETSC_TRUE indicates that any add or insertion
5416 that would generate a new entry in the nonzero structure instead produces
5417 an error. (Currently supported for AIJ and BAIJ formats only.) If this option is set, MatAssemblyBegin/End() requires one less global reduction
5418
5419 MAT_NEW_NONZERO_ALLOCATION_ERR set to PETSC_TRUE indicates that any add or insertion
5420 that would generate a new entry that has not been preallocated will
5421 instead produce an error. (Currently supported for AIJ and BAIJ formats
5422 only.) This is a useful flag when debugging matrix memory preallocation.
5423 If this option is set, MatAssemblyBegin/End() requires one less global reduction
5424
5425 MAT_IGNORE_OFF_PROC_ENTRIES set to PETSC_TRUE indicates entries destined for
5426 other processors should be dropped, rather than stashed.
5427 This is useful if you know that the "owning" processor is also
5428 always generating the correct matrix entries, so that PETSc need
5429 not transfer duplicate entries generated on another processor.
5430
5431 MAT_USE_HASH_TABLE indicates that a hash table should be used to improve the
5432 searches during matrix assembly. When this flag is set, the hash table
5433 is created during the first matrix assembly. This hash table is
5434 used the next time through, during MatSetValues()/MatSetValuesBlocked(),
5435 to improve the searching of indices. The MAT_NEW_NONZERO_LOCATIONS flag
5436 should be used with the MAT_USE_HASH_TABLE flag. This option is currently
5437 supported by the MATMPIBAIJ format only.
5438
5439 MAT_KEEP_NONZERO_PATTERN indicates that when MatZeroRows() is called the zeroed entries
5440 are kept in the nonzero structure
5441
5442 MAT_IGNORE_ZERO_ENTRIES - for AIJ/IS matrices this will stop zero values from creating
5443 a zero location in the matrix
5444
5445 MAT_USE_INODES - indicates using the inode version of the code; works with AIJ matrix types
5446
5447 MAT_NO_OFF_PROC_ZERO_ROWS - you know each process will only zero its own rows. This avoids all reductions in the
5448 zero row routines and thus improves performance for very large process counts.
5449
5450 MAT_IGNORE_LOWER_TRIANGULAR - For SBAIJ matrices, this will ignore any insertions you make in the lower triangular
5451 part of the matrix (since they should match the upper triangular part).
5452
5453 MAT_SORTED_FULL - each process provides exactly its local rows; all column indices for a given row are passed in a
5454 single call to MatSetValues(); preallocation is perfect, input is row-oriented, and INSERT_VALUES is used. Common
5455 with finite difference schemes with non-periodic boundary conditions.
5456 Notes:
5457 Can only be called after MatSetSizes() and MatSetType() have been set.
5458
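 Example of Usage (a minimal sketch, assuming MatSetSizes() and MatSetType() have already been called on mat):
.vb
   MatSetOption(mat,MAT_SYMMETRIC,PETSC_TRUE);
   MatSetOption(mat,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);   /* error out on entries that were not preallocated */
.ve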
5459 Level: intermediate
5460
5461.seealso: MatOption, Mat
5462
5463@*/
5464PetscErrorCode MatSetOption(Mat mat,MatOption op,PetscBool flg)
5465{
5466 PetscErrorCode ierr;
5467
5468 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5468; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5469 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5469,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5469,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5469,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5469,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5470 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5470,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5471 if (op > 0) {
5472 PetscValidLogicalCollectiveEnum(mat,op,2)do { PetscErrorCode _7_ierr; PetscMPIInt b1[2],b2[2]; b1[0] =
-(PetscMPIInt)op; b1[1] = (PetscMPIInt)op; _7_ierr = (PetscAllreduceBarrierCheck
(PetscObjectComm((PetscObject)mat),2,5472,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)mat))),0) || MPI_Allreduce((b1),(b2),(2),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000001)),(PetscObjectComm((PetscObject
)mat)))));do {if (__builtin_expect(!!(_7_ierr),0)) return PetscError
(((MPI_Comm)0x44000001),5472,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (-b2[0] != b2
[1]) return PetscError(PetscObjectComm((PetscObject)mat),5472
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,62,PETSC_ERROR_INITIAL,"Enum value must be same on all processes, argument # %d"
,2); } while (0)
;
5473 PetscValidLogicalCollectiveBool(mat,flg,3)do { PetscErrorCode _7_ierr; PetscMPIInt b1[2],b2[2]; b1[0] =
-(PetscMPIInt)flg; b1[1] = (PetscMPIInt)flg; _7_ierr = (PetscAllreduceBarrierCheck
(PetscObjectComm((PetscObject)mat),2,5473,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)mat))),0) || MPI_Allreduce((b1),(b2),(2),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000001)),(PetscObjectComm((PetscObject
)mat)))));do {if (__builtin_expect(!!(_7_ierr),0)) return PetscError
(((MPI_Comm)0x44000001),5473,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (-b2[0] != b2
[1]) return PetscError(PetscObjectComm((PetscObject)mat),5473
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,62,PETSC_ERROR_INITIAL,"Bool value must be same on all processes, argument # %d"
,3); } while (0)
;
5474 }
5475
5476 if (((int) op) <= MAT_OPTION_MIN || ((int) op) >= MAT_OPTION_MAX) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Options %d is out of range",(int)op)return PetscError(PetscObjectComm((PetscObject)mat),5476,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",63,PETSC_ERROR_INITIAL
,"Options %d is out of range",(int)op)
;
5477 if (!((PetscObject)mat)->type_name) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_TYPENOTSET,"Cannot set options until type and size have been set, see MatSetType() and MatSetSizes()")return PetscError(PetscObjectComm((PetscObject)mat),5477,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",89,PETSC_ERROR_INITIAL
,"Cannot set options until type and size have been set, see MatSetType() and MatSetSizes()"
)
;
5478
5479 switch (op) {
5480 case MAT_NO_OFF_PROC_ENTRIES:
5481 mat->nooffprocentries = flg;
5482 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5483 break;
5484 case MAT_SUBSET_OFF_PROC_ENTRIES:
5485 mat->assembly_subset = flg;
5486 if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
5487#if !defined(PETSC_HAVE_MPIUNI)
5488 ierr = MatStashScatterDestroy_BTS(&mat->stash);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5488,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5489#endif
5490 mat->stash.first_assembly_done = PETSC_FALSE;
5491 }
5492 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5493 case MAT_NO_OFF_PROC_ZERO_ROWS:
5494 mat->nooffproczerorows = flg;
5495 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5496 break;
5497 case MAT_SPD:
5498 mat->spd_set = PETSC_TRUE;
5499 mat->spd = flg;
5500 if (flg) {
5501 mat->symmetric = PETSC_TRUE;
5502 mat->structurally_symmetric = PETSC_TRUE;
5503 mat->symmetric_set = PETSC_TRUE;
5504 mat->structurally_symmetric_set = PETSC_TRUE;
5505 }
5506 break;
5507 case MAT_SYMMETRIC:
5508 mat->symmetric = flg;
5509 if (flg) mat->structurally_symmetric = PETSC_TRUE;
5510 mat->symmetric_set = PETSC_TRUE;
5511 mat->structurally_symmetric_set = flg;
5512#if !defined(PETSC_USE_COMPLEX)
5513 mat->hermitian = flg;
5514 mat->hermitian_set = PETSC_TRUE;
5515#endif
5516 break;
5517 case MAT_HERMITIAN:
5518 mat->hermitian = flg;
5519 if (flg) mat->structurally_symmetric = PETSC_TRUE;
5520 mat->hermitian_set = PETSC_TRUE;
5521 mat->structurally_symmetric_set = flg;
5522#if !defined(PETSC_USE_COMPLEX)
5523 mat->symmetric = flg;
5524 mat->symmetric_set = PETSC_TRUE;
5525#endif
5526 break;
5527 case MAT_STRUCTURALLY_SYMMETRIC:
5528 mat->structurally_symmetric = flg;
5529 mat->structurally_symmetric_set = PETSC_TRUE;
5530 break;
5531 case MAT_SYMMETRY_ETERNAL:
5532 mat->symmetric_eternal = flg;
5533 break;
5534 case MAT_STRUCTURE_ONLY:
5535 mat->structure_only = flg;
5536 break;
5537 case MAT_SORTED_FULL:
5538 mat->sortedfull = flg;
5539 break;
5540 default:
5541 break;
5542 }
5543 if (mat->ops->setoption) {
5544 ierr = (*mat->ops->setoption)(mat,op,flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5544,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5545 }
5546 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5547}
5548
5549/*@
5550 MatGetOption - Gets a parameter option that has been set for a matrix.
5551
5552 Logically Collective on Mat for certain operations (such as MAT_SPD); not collective for others (such as MAT_ROW_ORIENTED); see MatOption
5553
5554 Input Parameters:
5555+ mat - the matrix
5556- option - the option; this only responds to certain options, check the code for which ones
5557
5558 Output Parameter:
5559. flg - the current value of the option (PETSC_TRUE or PETSC_FALSE)
5560
5561 Notes:
5562 Can only be called after MatSetSizes() and MatSetType() have been called.
5563
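 Example of Usage (a minimal sketch, assuming A is a Mat whose type and sizes have already been set):
.vb
 PetscBool symm;
 MatGetOption(A,MAT_SYMMETRIC,&symm);
 if (symm) { /* safe to use a symmetric-only algorithm */ }
.ve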
5564 Level: intermediate
5565
5566.seealso: MatOption, MatSetOption()
5567
5568@*/
5569PetscErrorCode MatGetOption(Mat mat,MatOption op,PetscBool *flg)
5570{
5571 PetscFunctionBegin;
5572 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
5573 PetscValidType(mat,1);
5574
5575 if (((int) op) <= MAT_OPTION_MIN || ((int) op) >= MAT_OPTION_MAX) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Options %d is out of range",(int)op);
5576 if (!((PetscObject)mat)->type_name) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_TYPENOTSET,"Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
5577
5578 switch (op) {
5579 case MAT_NO_OFF_PROC_ENTRIES:
5580 *flg = mat->nooffprocentries;
5581 break;
5582 case MAT_NO_OFF_PROC_ZERO_ROWS:
5583 *flg = mat->nooffproczerorows;
5584 break;
5585 case MAT_SYMMETRIC:
5586 *flg = mat->symmetric;
5587 break;
5588 case MAT_HERMITIAN:
5589 *flg = mat->hermitian;
5590 break;
5591 case MAT_STRUCTURALLY_SYMMETRIC:
5592 *flg = mat->structurally_symmetric;
5593 break;
5594 case MAT_SYMMETRY_ETERNAL:
5595 *flg = mat->symmetric_eternal;
5596 break;
5597 case MAT_SPD:
5598 *flg = mat->spd;
5599 break;
5600 default:
5601 break;
5602 }
5603 PetscFunctionReturn(0);
5604}
5605
5606/*@
5607 MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
5608 this routine retains the old nonzero structure.
5609
5610 Logically Collective on Mat
5611
5612 Input Parameter:
5613. mat - the matrix
5614
5615 Level: intermediate
5616
5617 Notes:
5618 If the matrix was not preallocated then a default, likely poor, preallocation will be set in the matrix, so this routine should be called after the preallocation phase.
5619 See the Performance chapter of the users manual for information on preallocating matrices.
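 Example of Usage (a sketch of a common pattern, assuming J is an assembled, preallocated Mat being refilled each nonlinear iteration):
.vb
 MatZeroEntries(J);
 /* MatSetValues() calls here refill the retained nonzero pattern */
 MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);
 MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);
.ve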
5620
5621.seealso: MatZeroRows()
5622@*/
5623PetscErrorCode MatZeroEntries(Mat mat)
5624{
5625 PetscErrorCode ierr;
5626
5627 PetscFunctionBegin;
5628 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
5629 PetscValidType(mat,1);
5630 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
5631 if (mat->insertmode != NOT_SET_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for matrices where you have set values but not yet assembled");
5632 if (!mat->ops->zeroentries) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
5633 MatCheckPreallocated(mat,1);
5634
5635 ierr = PetscLogEventBegin(MAT_ZeroEntries,mat,0,0,0);CHKERRQ(ierr);
5636 ierr = (*mat->ops->zeroentries)(mat);CHKERRQ(ierr);
5637 ierr = PetscLogEventEnd(MAT_ZeroEntries,mat,0,0,0);CHKERRQ(ierr);
5638 ierr = PetscObjectStateIncrease((PetscObject)mat);CHKERRQ(ierr);
5639#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
5640 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
5641 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
5642 }
5643#endif
5644 PetscFunctionReturn(0);
5645}
5646
5647/*@
5648 MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
5649 of a set of rows and columns of a matrix.
5650
5651 Collective on Mat
5652
5653 Input Parameters:
5654+ mat - the matrix
5655. numRows - the number of rows to remove
5656. rows - the global row indices
5657. diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
5658. x - optional vector of solutions for zeroed rows (other entries in vector are not used)
5659- b - optional vector of the right-hand side that will be adjusted by the provided solution
5660
5661 Notes:
5662 This does not change the nonzero structure of the matrix; it merely zeros those entries in the matrix.
5663
5664 The user can set a value in the diagonal entry (or for the AIJ and
5665 row formats can optionally remove the main diagonal entry from the
5666 nonzero structure as well, by passing 0.0 as the diag argument).
5667
5668 For the parallel case, all processes that share the matrix (i.e.,
5669 those in the communicator used for matrix creation) MUST call this
5670 routine, regardless of whether any rows being zeroed are owned by
5671 them.
5672
5673 Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
5674 list only rows local to itself).
5675
5676 The option MAT_NO_OFF_PROC_ZERO_ROWS does not apply to this routine.
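 Example of Usage (a sketch for imposing Dirichlet conditions while keeping the matrix symmetric; A, x, and b are assumed to be an assembled Mat and conforming Vecs, with x holding the boundary values in the constrained rows, and bcRows holding hypothetical global indices):
.vb
 PetscInt bcRows[] = {0,10};
 MatZeroRowsColumns(A,2,bcRows,1.0,x,b);
.ve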
5677
5678 Level: intermediate
5679
5680.seealso: MatZeroRowsIS(), MatZeroRows(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
5681 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
5682@*/
5683PetscErrorCode MatZeroRowsColumns(Mat mat,PetscInt numRows,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
5684{
5685 PetscErrorCode ierr;
5686
5687 PetscFunctionBegin;
5688 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
5689 PetscValidType(mat,1);
5690 if (numRows) PetscValidIntPointer(rows,3);
5691 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
5692 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
5693 if (!mat->ops->zerorowscolumns) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
5694 MatCheckPreallocated(mat,1);
5695
5696 ierr = (*mat->ops->zerorowscolumns)(mat,numRows,rows,diag,x,b);CHKERRQ(ierr);
5697 ierr = MatViewFromOptions(mat,NULL,"-mat_view");CHKERRQ(ierr);
5698 ierr = PetscObjectStateIncrease((PetscObject)mat);CHKERRQ(ierr);
5699#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
5700 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
5701 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
5702 }
5703#endif
5704 PetscFunctionReturn(0);
5705}
5706
5707/*@
5708 MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
5709 of a set of rows and columns of a matrix.
5710
5711 Collective on Mat
5712
5713 Input Parameters:
5714+ mat - the matrix
5715. is - index set of the rows to zero
5716. diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
5717. x - optional vector of solutions for zeroed rows (other entries in vector are not used)
5718- b - optional vector of the right-hand side that will be adjusted by the provided solution
5719
5720 Notes:
5721 This does not change the nonzero structure of the matrix; it merely zeros those entries in the matrix.
5722
5723 The user can set a value in the diagonal entry (or for the AIJ and
5724 row formats can optionally remove the main diagonal entry from the
5725 nonzero structure as well, by passing 0.0 as the diag argument).
5726
5727 For the parallel case, all processes that share the matrix (i.e.,
5728 those in the communicator used for matrix creation) MUST call this
5729 routine, regardless of whether any rows being zeroed are owned by
5730 them.
5731
5732 Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
5733 list only rows local to itself).
5734
5735 The option MAT_NO_OFF_PROC_ZERO_ROWS does not apply to this routine.
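 Example of Usage (a sketch, assuming A, x, and b as in MatZeroRowsColumns() and hypothetical indices bcRows):
.vb
 IS is;
 PetscInt bcRows[] = {0,10};
 ISCreateGeneral(PETSC_COMM_WORLD,2,bcRows,PETSC_COPY_VALUES,&is);
 MatZeroRowsColumnsIS(A,is,1.0,x,b);
 ISDestroy(&is);
.ve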
5736
5737 Level: intermediate
5738
5739.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
5740 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRows(), MatZeroRowsColumnsStencil()
5741@*/
5742PetscErrorCode MatZeroRowsColumnsIS(Mat mat,IS is,PetscScalar diag,Vec x,Vec b)
5743{
5744 PetscErrorCode ierr;
5745 PetscInt numRows;
5746 const PetscInt *rows;
5747
5748 PetscFunctionBegin;
5749 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
5750 PetscValidHeaderSpecific(is,IS_CLASSID,2);
5751 PetscValidType(mat,1);
5752 PetscValidType(is,2);
5753 ierr = ISGetLocalSize(is,&numRows);CHKERRQ(ierr);
5754 ierr = ISGetIndices(is,&rows);CHKERRQ(ierr);
5755 ierr = MatZeroRowsColumns(mat,numRows,rows,diag,x,b);CHKERRQ(ierr);
5756 ierr = ISRestoreIndices(is,&rows);CHKERRQ(ierr);
5757 PetscFunctionReturn(0);
5758}
5759
5760/*@
5761 MatZeroRows - Zeros all entries (except possibly the main diagonal)
5762 of a set of rows of a matrix.
5763
5764 Collective on Mat
5765
5766 Input Parameters:
5767+ mat - the matrix
5768. numRows - the number of rows to remove
5769. rows - the global row indices
5770. diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
5771. x - optional vector of solutions for zeroed rows (other entries in vector are not used)
5772- b - optional vector of the right-hand side that will be adjusted by the provided solution
5773
5774 Notes:
5775 For the AIJ and BAIJ matrix formats this removes the old nonzero structure,
5776 but does not release memory. For the dense and block diagonal
5777 formats this does not alter the nonzero structure.
5778
5779 If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) has been set, the nonzero structure
5780 of the matrix is not changed (even for AIJ and BAIJ matrices); the values are
5781 merely zeroed.
5782
5783 The user can set a value in the diagonal entry (or for the AIJ and
5784 row formats can optionally remove the main diagonal entry from the
5785 nonzero structure as well, by passing 0.0 as the diag argument).
5786
5787 For the parallel case, all processes that share the matrix (i.e.,
5788 those in the communicator used for matrix creation) MUST call this
5789 routine, regardless of whether any rows being zeroed are owned by
5790 them.
5791
5792 Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
5793 list only rows local to itself).
5794
5795 You can call MatSetOption(mat,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) if each process indicates only rows it
5796 owns that are to be zeroed. This saves a global synchronization in the implementation.
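 Example of Usage (a sketch combining the option above with a zeroing call; A, x, and b are assumed assembled and conforming, and row is assumed to be owned by this process):
.vb
 PetscInt row = 0;
 MatSetOption(A,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE);
 MatZeroRows(A,1,&row,1.0,x,b);
.ve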
5797
5798 Level: intermediate
5799
5800.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
5801 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
5802@*/
5803PetscErrorCode MatZeroRows(Mat mat,PetscInt numRows,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
5804{
5805 PetscErrorCode ierr;
5806
5807 PetscFunctionBegin;
5808 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
5809 PetscValidType(mat,1);
5810 if (numRows) PetscValidIntPointer(rows,3);
5811 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
5812 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
5813 if (!mat->ops->zerorows) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
5814 MatCheckPreallocated(mat,1);
5815
5816 ierr = (*mat->ops->zerorows)(mat,numRows,rows,diag,x,b);CHKERRQ(ierr);
5817 ierr = MatViewFromOptions(mat,NULL,"-mat_view");CHKERRQ(ierr);
5818 ierr = PetscObjectStateIncrease((PetscObject)mat);CHKERRQ(ierr);
5819#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
5820 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
5821 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
5822 }
5823#endif
5824 PetscFunctionReturn(0);
5825}
5826
5827/*@
5828 MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
5829 of a set of rows of a matrix.
5830
5831 Collective on Mat
5832
5833 Input Parameters:
5834+ mat - the matrix
5835. is - index set of rows to remove
5836. diag - value put in all diagonals of eliminated rows
5837. x - optional vector of solutions for zeroed rows (other entries in vector are not used)
5838- b - optional vector of the right-hand side that will be adjusted by the provided solution
5839
5840 Notes:
5841 For the AIJ and BAIJ matrix formats this removes the old nonzero structure,
5842 but does not release memory. For the dense and block diagonal
5843 formats this does not alter the nonzero structure.
5844
5845 If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) has been set, the nonzero structure
5846 of the matrix is not changed (even for AIJ and BAIJ matrices); the values are
5847 merely zeroed.
5848
5849 The user can set a value in the diagonal entry (or for the AIJ and
5850 row formats can optionally remove the main diagonal entry from the
5851 nonzero structure as well, by passing 0.0 as the diag argument).
5852
5853 For the parallel case, all processes that share the matrix (i.e.,
5854 those in the communicator used for matrix creation) MUST call this
5855 routine, regardless of whether any rows being zeroed are owned by
5856 them.
5857
5858 Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
5859 list only rows local to itself).
5860
5861 You can call MatSetOption(mat,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) if each process indicates only rows it
5862 owns that are to be zeroed. This saves a global synchronization in the implementation.
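 Example of Usage (a sketch, assuming nBC consecutive boundary rows starting at global index firstBC, both hypothetical; A, x, and b as in MatZeroRows()):
.vb
 IS is;
 ISCreateStride(PETSC_COMM_WORLD,nBC,firstBC,1,&is);
 MatZeroRowsIS(A,is,1.0,x,b);
 ISDestroy(&is);
.ve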
5863
5864 Level: intermediate
5865
5866.seealso: MatZeroRows(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
5867 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
5868@*/
5869PetscErrorCode MatZeroRowsIS(Mat mat,IS is,PetscScalar diag,Vec x,Vec b)
5870{
5871 PetscInt numRows;
5872 const PetscInt *rows;
5873 PetscErrorCode ierr;
5874
5875 PetscFunctionBegin;
5876 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
5877 PetscValidType(mat,1);
5878 PetscValidHeaderSpecific(is,IS_CLASSID,2);
5879 ierr = ISGetLocalSize(is,&numRows);CHKERRQ(ierr);
5880 ierr = ISGetIndices(is,&rows);CHKERRQ(ierr);
5881 ierr = MatZeroRows(mat,numRows,rows,diag,x,b);CHKERRQ(ierr);
5882 ierr = ISRestoreIndices(is,&rows);CHKERRQ(ierr);
5883 PetscFunctionReturn(0);
5884}
5885
5886/*@
5887 MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
5888 of a set of rows of a matrix. These rows must be local to the process.
5889
5890 Collective on Mat
5891
5892 Input Parameters:
5893+ mat - the matrix
5894. numRows - the number of rows to remove
5895. rows - the grid coordinates (and component number when dof > 1) for matrix rows
5896. diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
5897. x - optional vector of solutions for zeroed rows (other entries in vector are not used)
5898- b - optional vector of the right-hand side that will be adjusted by the provided solution
5899
5900 Notes:
5901 For the AIJ and BAIJ matrix formats this removes the old nonzero structure,
5902 but does not release memory. For the dense and block diagonal
5903 formats this does not alter the nonzero structure.
5904
5905 If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) has been set, the nonzero structure
5906 of the matrix is not changed (even for AIJ and BAIJ matrices); the values are
5907 merely zeroed.
5908
5909 The user can set a value in the diagonal entry (or for the AIJ and
5910 row formats can optionally remove the main diagonal entry from the
5911 nonzero structure as well, by passing 0.0 as the diag argument).
5912
5913 For the parallel case, all processes that share the matrix (i.e.,
5914 those in the communicator used for matrix creation) MUST call this
5915 routine, regardless of whether any rows being zeroed are owned by
5916 them.
5917
5918 Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
5919 list only rows local to itself).
5920
5921 The grid coordinates are across the entire grid, not just the local portion
5922
5923 In Fortran idxm and idxn should be declared as
5924$ MatStencil idxm(4,m)
5925 and the values inserted using
5926$ idxm(MatStencil_i,1) = i
5927$ idxm(MatStencil_j,1) = j
5928$ idxm(MatStencil_k,1) = k
5929$ idxm(MatStencil_c,1) = c
5930 etc
5931
5932 For periodic boundary conditions, use negative indices for values to the left of index 0 (these are
5933 obtained by wrapping values from the right edge), and indices greater than the last valid index for values
5934 to the right (obtained by wrapping values from the left edge). This does not work for anything but the
5935 DM_BOUNDARY_PERIODIC boundary type.
5936
5937 For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
5938 a single value per point) you can skip filling those indices.
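 Example of Usage (a sketch in C for a 2d grid with one dof per point; A is assumed to come from DMCreateMatrix() so the stencil information is set, and i,j are hypothetical grid indices):
.vb
 MatStencil row;
 row.i = i; row.j = j;   /* the unused k and c slots may be left unset here */
 MatZeroRowsStencil(A,1,&row,1.0,x,b);
.ve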
5939
5940 Level: intermediate
5941
5942.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRows(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
5943 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
5944@*/
5945PetscErrorCode MatZeroRowsStencil(Mat mat,PetscInt numRows,const MatStencil rows[],PetscScalar diag,Vec x,Vec b)
5946{
5947 PetscInt dim = mat->stencil.dim;
5948 PetscInt sdim = dim - (1 - (PetscInt) mat->stencil.noc);
5949 PetscInt *dims = mat->stencil.dims+1;
5950 PetscInt *starts = mat->stencil.starts;
5951 PetscInt *dxm = (PetscInt*) rows;
5952 PetscInt *jdxm, i, j, tmp, numNewRows = 0;
5953 PetscErrorCode ierr;
5954
5955 PetscFunctionBegin;
5956 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
5957 PetscValidType(mat,1);
5958 if (numRows) PetscValidIntPointer(rows,3);
5959
5960 ierr = PetscMalloc1(numRows, &jdxm);CHKERRQ(ierr);
5961 for (i = 0; i < numRows; ++i) {
5962 /* Skip unused dimensions (they are ordered k, j, i, c) */
5963 for (j = 0; j < 3-sdim; ++j) dxm++;
5964 /* Local index in X dir */
5965 tmp = *dxm++ - starts[0];
5966 /* Loop over remaining dimensions */
5967 for (j = 0; j < dim-1; ++j) {
5968 /* If nonlocal, set index to be negative */
5969 if ((*dxm++ - starts[j+1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
5970 /* Update local index */
5971 else tmp = tmp*dims[j] + *(dxm-1) - starts[j+1];
5972 }
5973 /* Skip component slot if necessary */
5974 if (mat->stencil.noc) dxm++;
5975 /* Local row number */
5976 if (tmp >= 0) {
5977 jdxm[numNewRows++] = tmp;
5978 }
5979 }
5980 ierr = MatZeroRowsLocal(mat,numNewRows,jdxm,diag,x,b);CHKERRQ(ierr);
5981 ierr = PetscFree(jdxm);CHKERRQ(ierr);
5982 PetscFunctionReturn(0);
5983}
5984
5985/*@
5986 MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
5987 of a set of rows and columns of a matrix.
5988
5989 Collective on Mat
5990
5991 Input Parameters:
5992+ mat - the matrix
5993. numRows - the number of rows/columns to remove
5994. rows - the grid coordinates (and component number when dof > 1) for matrix rows
5995. diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
5996. x - optional vector of solutions for zeroed rows (other entries in vector are not used)
5997- b - optional vector of the right-hand side that will be adjusted by the provided solution
5998
5999 Notes:
6000 For the AIJ and BAIJ matrix formats this removes the old nonzero structure,
6001 but does not release memory. For the dense and block diagonal
6002 formats this does not alter the nonzero structure.
6003
6004 If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) has been set, the nonzero structure
6005 of the matrix is not changed (even for AIJ and BAIJ matrices); the values are
6006 merely zeroed.
6007
6008 The user can set a value in the diagonal entry (or for the AIJ and
6009 row formats can optionally remove the main diagonal entry from the
6010 nonzero structure as well, by passing 0.0 as the diag argument).
6011
6012 For the parallel case, all processes that share the matrix (i.e.,
6013 those in the communicator used for matrix creation) MUST call this
6014 routine, regardless of whether any rows being zeroed are owned by
6015 them.
6016
6017 Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6018 list only rows local to itself, but the row/column numbers are given in local numbering).
6019
6020 The grid coordinates are across the entire grid, not just the local portion
6021
6022 In Fortran idxm and idxn should be declared as
6023$ MatStencil idxm(4,m)
6024 and the values inserted using
6025$ idxm(MatStencil_i,1) = i
6026$ idxm(MatStencil_j,1) = j
6027$ idxm(MatStencil_k,1) = k
6028$ idxm(MatStencil_c,1) = c
6029 etc
6030
6031 For periodic boundary conditions, use negative indices for values to the left of index 0 (these are
6032 obtained by wrapping values from the right edge), and indices greater than the last valid index for values
6033 to the right (obtained by wrapping values from the left edge). This does not work for anything but the
6034 DM_BOUNDARY_PERIODIC boundary type.
6035
6036 For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6037 a single value per point) you can skip filling those indices.
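 Example of Usage (a sketch for a 2d grid with dof > 1; A is assumed to come from DMCreateMatrix(), and i,j are hypothetical grid indices):
.vb
 MatStencil rc;
 rc.i = i; rc.j = j; rc.c = 0;   /* zero the row and column of component 0 at grid point (i,j) */
 MatZeroRowsColumnsStencil(A,1,&rc,1.0,x,b);
.ve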
6038
6039 Level: intermediate
6040
6041.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
6042 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRows()
6043@*/
6044PetscErrorCode MatZeroRowsColumnsStencil(Mat mat,PetscInt numRows,const MatStencil rows[],PetscScalar diag,Vec x,Vec b)
6045{
6046 PetscInt dim = mat->stencil.dim;
6047 PetscInt sdim = dim - (1 - (PetscInt) mat->stencil.noc);
6048 PetscInt *dims = mat->stencil.dims+1;
6049 PetscInt *starts = mat->stencil.starts;
6050 PetscInt *dxm = (PetscInt*) rows;
6051 PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6052 PetscErrorCode ierr;
6053
6054 PetscFunctionBegin;
6055 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6056 PetscValidType(mat,1);
6057 if (numRows) PetscValidIntPointer(rows,3);
6058
6059 ierr = PetscMalloc1(numRows, &jdxm);CHKERRQ(ierr);
6060 for (i = 0; i < numRows; ++i) {
6061 /* Skip unused dimensions (they are ordered k, j, i, c) */
6062 for (j = 0; j < 3-sdim; ++j) dxm++;
6063 /* Local index in X dir */
6064 tmp = *dxm++ - starts[0];
6065 /* Loop over remaining dimensions */
6066 for (j = 0; j < dim-1; ++j) {
6067 /* If nonlocal, set index to be negative */
6068 if ((*dxm++ - starts[j+1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
6069 /* Update local index */
6070 else tmp = tmp*dims[j] + *(dxm-1) - starts[j+1];
6071 }
6072 /* Skip component slot if necessary */
6073 if (mat->stencil.noc) dxm++;
6074 /* Local row number */
6075 if (tmp >= 0) {
6076 jdxm[numNewRows++] = tmp;
6077 }
6078 }
6079 ierr = MatZeroRowsColumnsLocal(mat,numNewRows,jdxm,diag,x,b);CHKERRQ(ierr);
6080 ierr = PetscFree(jdxm);CHKERRQ(ierr);
6081 PetscFunctionReturn(0);
6082}
6083
6084/*@C
6085 MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6086 of a set of rows of a matrix; using local numbering of rows.
6087
6088 Collective on Mat
6089
6090 Input Parameters:
6091+ mat - the matrix
6092. numRows - the number of rows to remove
6093. rows - the local row indices
6094. diag - value put in all diagonals of eliminated rows
6095. x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6096- b - optional vector of the right-hand side that will be adjusted by the provided solution
6097
6098 Notes:
6099 Before calling MatZeroRowsLocal(), the user must first set the
6100 local-to-global mapping by calling MatSetLocalToGlobalMapping().
6101
6102 For the AIJ matrix formats this removes the old nonzero structure,
6103 but does not release memory. For the dense and block diagonal
6104 formats this does not alter the nonzero structure.
6105
6106 If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) has been set, the nonzero structure
6107 of the matrix is not changed (even for AIJ and BAIJ matrices); the values are
6108 merely zeroed.
6109
6110 The user can set a value in the diagonal entry (or for the AIJ and
6111 row formats can optionally remove the main diagonal entry from the
6112 nonzero structure as well, by passing 0.0 as the diag argument).
6113
6114 You can call MatSetOption(mat,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) if each process indicates only rows it
6115 owns that are to be zeroed. This saves a global synchronization in the implementation.
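 Example of Usage (a sketch, assuming ltog is an ISLocalToGlobalMapping for both rows and columns, and lrow is a hypothetical local index of a boundary row):
.vb
 MatSetLocalToGlobalMapping(A,ltog,ltog);
 /* ... assemble A ... */
 PetscInt lrow = 0;
 MatZeroRowsLocal(A,1,&lrow,1.0,x,b);
.ve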
6116
6117 Level: intermediate
6118
6119.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRows(), MatSetOption(),
6120 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
6121@*/
6122PetscErrorCode MatZeroRowsLocal(Mat mat,PetscInt numRows,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
6123{
6124 PetscErrorCode ierr;
6125
6126 PetscFunctionBegin;
6127 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6128 PetscValidType(mat,1);
6129 if (numRows) PetscValidIntPointer(rows,3);
6130 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6131 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6132 MatCheckPreallocated(mat,1);
6133
6134 if (mat->ops->zerorowslocal) {
6135 ierr = (*mat->ops->zerorowslocal)(mat,numRows,rows,diag,x,b);CHKERRQ(ierr);
6136 } else {
6137 IS is, newis;
6138 const PetscInt *newRows;
6139
6140 if (!mat->rmap->mapping) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Need to provide local to global mapping to matrix first");
6141 ierr = ISCreateGeneral(PETSC_COMM_SELF,numRows,rows,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
6142 ierr = ISLocalToGlobalMappingApplyIS(mat->rmap->mapping,is,&newis);CHKERRQ(ierr);
6143 ierr = ISGetIndices(newis,&newRows);CHKERRQ(ierr);
6144 ierr = (*mat->ops->zerorows)(mat,numRows,newRows,diag,x,b);CHKERRQ(ierr);
6145 ierr = ISRestoreIndices(newis,&newRows);CHKERRQ(ierr);
6146 ierr = ISDestroy(&newis);CHKERRQ(ierr);
6147 ierr = ISDestroy(&is);CHKERRQ(ierr);
6148 }
6149 ierr = PetscObjectStateIncrease((PetscObject)mat);CHKERRQ(ierr);
6150#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
6151 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
6152 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
6153 }
6154#endif
6155 PetscFunctionReturn(0);
6156}
6157
6158/*@
6159 MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6160 of a set of rows of a matrix; using local numbering of rows.
6161
6162 Collective on Mat
6163
6164 Input Parameters:
6165+ mat - the matrix
6166. is - index set of local rows to remove
6167. diag - value put in all diagonals of eliminated rows
6168. x - optional vector of solutions for zeroed rows (other entries in the vector are not used)
6169- b - optional right-hand-side vector, which will be adjusted by the provided solution
6170
6171 Notes:
6172 Before calling MatZeroRowsLocalIS(), the user must first set the
6173 local-to-global mapping by calling MatSetLocalToGlobalMapping().
6174
6175 For the AIJ matrix formats this removes the old nonzero structure,
6176 but does not release memory. For the dense and block diagonal
6177 formats this does not alter the nonzero structure.
6178
6179 If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) has been set, the nonzero structure
6180 of the matrix is not changed (even for AIJ and BAIJ matrices); the values are
6181 merely zeroed.
6182
6183 The user can set a value in the diagonal entry (or for the AIJ and
6184 row formats can optionally remove the main diagonal entry from the
6185 nonzero structure as well, by passing 0.0 as the final argument).
6186
6187 You can call MatSetOption(mat,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) if each process will only indicate
6188 rows it owns that are to be zeroed; this saves a global synchronization in the implementation.
6189
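   Example of Usage: a minimal sketch; the index set contents are illustrative, and mat is assumed to be assembled with a local-to-global mapping already set:
.vb
   IS       is;
   PetscInt lrows[] = {0,3};                        /* local numbers of the rows to zero */
   ISCreateGeneral(PETSC_COMM_SELF,2,lrows,PETSC_COPY_VALUES,&is);
   MatZeroRowsLocalIS(mat,is,1.0,NULL,NULL);        /* keep a unit diagonal in the zeroed rows */
   ISDestroy(&is);
.ve
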
6190 Level: intermediate
6191
6192.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRows(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
6193 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
6194@*/
6195PetscErrorCode MatZeroRowsLocalIS(Mat mat,IS is,PetscScalar diag,Vec x,Vec b)
6196{
6197 PetscErrorCode ierr;
6198 PetscInt numRows;
6199 const PetscInt *rows;
6200
6201 PetscFunctionBegin;
6202 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6203 PetscValidType(mat,1);
6204 PetscValidHeaderSpecific(is,IS_CLASSID,2);
6205 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6206 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6207 MatCheckPreallocated(mat,1);
6208
6209 ierr = ISGetLocalSize(is,&numRows);CHKERRQ(ierr);
6210 ierr = ISGetIndices(is,&rows);CHKERRQ(ierr);
6211 ierr = MatZeroRowsLocal(mat,numRows,rows,diag,x,b);CHKERRQ(ierr);
6212 ierr = ISRestoreIndices(is,&rows);CHKERRQ(ierr);
6213 PetscFunctionReturn(0);
6214}
6215
6216/*@
6217 MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6218 of a set of rows and columns of a matrix; using local numbering of rows.
6219
6220 Collective on Mat
6221
6222 Input Parameters:
6223+ mat - the matrix
6224. numRows - the number of rows to remove
6225. rows - the local row indices
6226. diag - value put in all diagonals of eliminated rows
6227. x - optional vector of solutions for zeroed rows (other entries in the vector are not used)
6228- b - optional right-hand-side vector, which will be adjusted by the provided solution
6229
6230 Notes:
6231 Before calling MatZeroRowsColumnsLocal(), the user must first set the
6232 local-to-global mapping by calling MatSetLocalToGlobalMapping().
6233
6234 The user can set a value in the diagonal entry (or for the AIJ and
6235 row formats can optionally remove the main diagonal entry from the
6236 nonzero structure as well, by passing 0.0 as the final argument).
6237
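   Example of Usage: a minimal sketch for eliminating known unknowns; the indices are illustrative, and x and b are assumed to hold the known solution values and the right hand side:
.vb
   PetscInt lrows[] = {0,3};                        /* local numbers of the rows/columns to zero */
   MatZeroRowsColumnsLocal(mat,2,lrows,1.0,x,b);    /* b is adjusted using the values in x */
.ve
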
6238 Level: intermediate
6239
6240.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
6241 MatZeroRows(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
6242@*/
6243PetscErrorCode MatZeroRowsColumnsLocal(Mat mat,PetscInt numRows,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
6244{
6245 PetscErrorCode ierr;
6246 IS is, newis;
6247 const PetscInt *newRows;
6248
6249 PetscFunctionBegin;
6250 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6251 PetscValidType(mat,1);
6252 if (numRows) PetscValidIntPointer(rows,3);
6253 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6254 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6255 MatCheckPreallocated(mat,1);
6256
6257 if (!mat->cmap->mapping) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Need to provide local to global mapping to matrix first");
6258 ierr = ISCreateGeneral(PETSC_COMM_SELF,numRows,rows,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
6259 ierr = ISLocalToGlobalMappingApplyIS(mat->cmap->mapping,is,&newis);CHKERRQ(ierr);
6260 ierr = ISGetIndices(newis,&newRows);CHKERRQ(ierr);
6261 ierr = (*mat->ops->zerorowscolumns)(mat,numRows,newRows,diag,x,b);CHKERRQ(ierr);
6262 ierr = ISRestoreIndices(newis,&newRows);CHKERRQ(ierr);
6263 ierr = ISDestroy(&newis);CHKERRQ(ierr);
6264 ierr = ISDestroy(&is);CHKERRQ(ierr);
6265 ierr = PetscObjectStateIncrease((PetscObject)mat);CHKERRQ(ierr);
6266#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
6267 if (mat->valid_GPU_matrix != PETSC_OFFLOAD_UNALLOCATED) {
6268 mat->valid_GPU_matrix = PETSC_OFFLOAD_CPU;
6269 }
6270#endif
6271 PetscFunctionReturn(0);
6272}
6273
6274/*@
6275 MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6276 of a set of rows and columns of a matrix; using local numbering of rows.
6277
6278 Collective on Mat
6279
6280 Input Parameters:
6281+ mat - the matrix
6282. is - index set of local rows to remove
6283. diag - value put in all diagonals of eliminated rows
6284. x - optional vector of solutions for zeroed rows (other entries in the vector are not used)
6285- b - optional right-hand-side vector, which will be adjusted by the provided solution
6286
6287 Notes:
6288 Before calling MatZeroRowsColumnsLocalIS(), the user must first set the
6289 local-to-global mapping by calling MatSetLocalToGlobalMapping().
6290
6291 The user can set a value in the diagonal entry (or for the AIJ and
6292 row formats can optionally remove the main diagonal entry from the
6293 nonzero structure as well, by passing 0.0 as the final argument).
6294
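   Example of Usage: a minimal sketch; the index set contents are illustrative, and x and b are assumed to hold the known solution values and the right hand side:
.vb
   IS       is;
   PetscInt lrows[] = {0,3};
   ISCreateGeneral(PETSC_COMM_SELF,2,lrows,PETSC_COPY_VALUES,&is);
   MatZeroRowsColumnsLocalIS(mat,is,1.0,x,b);
   ISDestroy(&is);
.ve
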
6295 Level: intermediate
6296
6297.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
6298 MatZeroRowsColumnsLocal(), MatZeroRows(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
6299@*/
6300PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat,IS is,PetscScalar diag,Vec x,Vec b)
6301{
6302 PetscErrorCode ierr;
6303 PetscInt numRows;
6304 const PetscInt *rows;
6305
6306 PetscFunctionBegin;
6307 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6308 PetscValidType(mat,1);
6309 PetscValidHeaderSpecific(is,IS_CLASSID,2);
6310 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6311 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6312 MatCheckPreallocated(mat,1);
6313
6314 ierr = ISGetLocalSize(is,&numRows);CHKERRQ(ierr);
6315 ierr = ISGetIndices(is,&rows);CHKERRQ(ierr);
6316 ierr = MatZeroRowsColumnsLocal(mat,numRows,rows,diag,x,b);CHKERRQ(ierr);
6317 ierr = ISRestoreIndices(is,&rows);CHKERRQ(ierr);
6318 PetscFunctionReturn(0);
6319}
6320
6321/*@C
6322 MatGetSize - Returns the numbers of rows and columns in a matrix.
6323
6324 Not Collective
6325
6326 Input Parameter:
6327. mat - the matrix
6328
6329 Output Parameters:
6330+ m - the number of global rows
6331- n - the number of global columns
6332
6333 Note: both output parameters can be NULL on input.
6334
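   Example of Usage: a minimal sketch, assuming mat has been created elsewhere:
.vb
   PetscInt M,N;
   MatGetSize(mat,&M,&N);          /* global dimensions */
   MatGetSize(mat,&M,NULL);        /* pass NULL when only one dimension is needed */
.ve
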
6335 Level: beginner
6336
6337.seealso: MatGetLocalSize()
6338@*/
6339PetscErrorCode MatGetSize(Mat mat,PetscInt *m,PetscInt *n)
6340{
6341 PetscFunctionBegin;
6342 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6343 if (m) *m = mat->rmap->N;
6344 if (n) *n = mat->cmap->N;
6345 PetscFunctionReturn(0);
6346}
6347
6348/*@C
6349 MatGetLocalSize - Returns the number of rows and columns in a matrix
6350 stored locally. This information may be implementation dependent, so
6351 use with care.
6352
6353 Not Collective
6354
6355 Input Parameter:
6356. mat - the matrix
6357
6358 Output Parameters:
6359+ m - the number of local rows
6360- n - the number of local columns
6361
6362 Note: both output parameters can be NULL on input.
6363
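   Example of Usage: a minimal sketch, assuming mat has been created elsewhere:
.vb
   PetscInt m,n;
   MatGetLocalSize(mat,&m,&n);     /* rows and columns stored on this process */
.ve
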
6364 Level: beginner
6365
6366.seealso: MatGetSize()
6367@*/
6368PetscErrorCode MatGetLocalSize(Mat mat,PetscInt *m,PetscInt *n)
6369{
6370 PetscFunctionBegin;
6371 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6372 if (m) PetscValidIntPointer(m,2);
6373 if (n) PetscValidIntPointer(n,3);
6374 if (m) *m = mat->rmap->n;
6375 if (n) *n = mat->cmap->n;
6376 PetscFunctionReturn(0);
6377}
6378
6379/*@C
6380 MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with the portion, owned by
6381 this process, of a vector that the matrix may be multiplied against (the columns of the "diagonal block").
6382
6383 Not Collective, unless matrix has not been allocated, then collective on Mat
6384
6385 Input Parameter:
6386. mat - the matrix
6387
6388 Output Parameters:
6389+ m - the global index of the first local column
6390- n - one more than the global index of the last local column
6391
6392 Notes:
6393 both output parameters can be NULL on input.
6394
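   Example of Usage: a minimal sketch:
.vb
   PetscInt cstart,cend;
   MatGetOwnershipRangeColumn(mat,&cstart,&cend);
   /* columns cstart..cend-1 form this process's "diagonal block" */
.ve
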
6395 Level: developer
6396
6397.seealso: MatGetOwnershipRange(), MatGetOwnershipRanges(), MatGetOwnershipRangesColumn()
6398
6399@*/
6400PetscErrorCode MatGetOwnershipRangeColumn(Mat mat,PetscInt *m,PetscInt *n)
6401{
6402 PetscFunctionBegin;
6403 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6404 PetscValidType(mat,1);
6405 if (m) PetscValidIntPointer(m,2);
6406 if (n) PetscValidIntPointer(n,3);
6407 MatCheckPreallocated(mat,1);
6408 if (m) *m = mat->cmap->rstart;
6409 if (n) *n = mat->cmap->rend;
6410 PetscFunctionReturn(0);
6411}
6412
6413/*@C
6414 MatGetOwnershipRange - Returns the range of matrix rows owned by
6415 this processor, assuming that the matrix is laid out with the first
6416 n1 rows on the first processor, the next n2 rows on the second, etc.
6417 For certain parallel layouts this range may not be well defined.
6418
6419 Not Collective
6420
6421 Input Parameter:
6422. mat - the matrix
6423
6424 Output Parameters:
6425+ m - the global index of the first local row
6426- n - one more than the global index of the last local row
6427
6428 Note: Both output parameters can be NULL on input.
6429$ This function requires that the matrix be preallocated. If you have not preallocated, consider using
6430$ PetscSplitOwnership(MPI_Comm comm, PetscInt *n, PetscInt *N)
6431$ and then MPI_Scan() to calculate prefix sums of the local sizes.
6432
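   Example of Usage: a minimal sketch; the values inserted are illustrative:
.vb
   PetscInt    rstart,rend,i;
   PetscScalar v = 1.0;
   MatGetOwnershipRange(mat,&rstart,&rend);
   for (i=rstart; i<rend; i++) {
     MatSetValues(mat,1,&i,1,&i,&v,INSERT_VALUES);  /* touch only locally owned rows */
   }
   MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
   MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
.ve
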
6433 Level: beginner
6434
6435.seealso: MatGetOwnershipRanges(), MatGetOwnershipRangeColumn(), MatGetOwnershipRangesColumn(), PetscSplitOwnership(), PetscSplitOwnershipBlock()
6436
6437@*/
6438PetscErrorCode MatGetOwnershipRange(Mat mat,PetscInt *m,PetscInt *n)
6439{
6440 PetscFunctionBegin;
6441 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6442 PetscValidType(mat,1);
6443 if (m) PetscValidIntPointer(m,2);
6444 if (n) PetscValidIntPointer(n,3);
6445 MatCheckPreallocated(mat,1);
6446 if (m) *m = mat->rmap->rstart;
6447 if (n) *n = mat->rmap->rend;
6448 PetscFunctionReturn(0);
6449}
6450
6451/*@C
6452 MatGetOwnershipRanges - Returns the range of matrix rows owned by
6453 each process
6454
6455 Not Collective, unless matrix has not been allocated, then collective on Mat
6456
6457 Input Parameter:
6458. mat - the matrix
6459
6460 Output Parameter:
6461. ranges - the start of each process's portion, with one additional entry at the end equal to the total number of rows
6462
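   Example of Usage: a minimal sketch; the returned array is owned by the matrix and must not be freed:
.vb
   const PetscInt *ranges;
   PetscMPIInt    rank;
   MatGetOwnershipRanges(mat,&ranges);
   MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);
   /* rows ranges[rank]..ranges[rank+1]-1 are owned by this process */
.ve
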
6463 Level: beginner
6464
6465.seealso: MatGetOwnershipRange(), MatGetOwnershipRangeColumn(), MatGetOwnershipRangesColumn()
6466
6467@*/
6468PetscErrorCode MatGetOwnershipRanges(Mat mat,const PetscInt **ranges)
6469{
6470 PetscErrorCode ierr;
6471
6472 PetscFunctionBegin;
6473 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6474 PetscValidType(mat,1);
6475 MatCheckPreallocated(mat,1);
6476 ierr = PetscLayoutGetRanges(mat->rmap,ranges);CHKERRQ(ierr);
6477 PetscFunctionReturn(0);
6478}
6479
6480/*@C
6481 MatGetOwnershipRangesColumn - Returns, for every process, the range of matrix columns associated with
6482 its owned portion of a vector that the matrix may be multiplied against (the columns of each process's "diagonal block").
6483
6484 Not Collective, unless matrix has not been allocated, then collective on Mat
6485
6486 Input Parameter:
6487. mat - the matrix
6488
6489 Output Parameter:
6490. ranges - the start of each process's portion of the columns, with one additional entry at the end equal to the total number of columns
6491
6492 Level: beginner
6493
6494.seealso: MatGetOwnershipRange(), MatGetOwnershipRangeColumn(), MatGetOwnershipRanges()
6495
6496@*/
6497PetscErrorCode MatGetOwnershipRangesColumn(Mat mat,const PetscInt **ranges)
6498{
6499 PetscErrorCode ierr;
6500
6501 PetscFunctionBegin;
6502 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6503 PetscValidType(mat,1);
6504 MatCheckPreallocated(mat,1);
6505 ierr = PetscLayoutGetRanges(mat->cmap,ranges);CHKERRQ(ierr);
6506 PetscFunctionReturn(0);
6507}
6508
6509/*@C
6510 MatGetOwnershipIS - Get row and column ownership as index sets
6511
6512 Not Collective
6513
6514 Input Arguments:
6515. A - matrix (a specialized implementation exists for MATELEMENTAL; other types return a standard row-based partition)
6516
6517 Output Arguments:
6518+ rows - rows in which this process owns elements
6519- cols - columns in which this process owns elements
6520
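   Example of Usage: a minimal sketch; the index sets are created by the call and must be destroyed by the caller:
.vb
   IS rows,cols;
   MatGetOwnershipIS(A,&rows,&cols);
   /* ... use the index sets ... */
   ISDestroy(&rows);
   ISDestroy(&cols);
.ve
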
6521 Level: intermediate
6522
6523.seealso: MatGetOwnershipRange(), MatGetOwnershipRangeColumn(), MatSetValues(), MATELEMENTAL
6524@*/
6525PetscErrorCode MatGetOwnershipIS(Mat A,IS *rows,IS *cols)
6526{
6527 PetscErrorCode ierr,(*f)(Mat,IS*,IS*);
6528
6529 PetscFunctionBegin;
6530 MatCheckPreallocated(A,1);
6531 ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetOwnershipIS_C",&f);CHKERRQ(ierr);
6532 if (f) {
6533 ierr = (*f)(A,rows,cols);CHKERRQ(ierr);
6534 } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
6535 if (rows) {ierr = ISCreateStride(PETSC_COMM_SELF,A->rmap->n,A->rmap->rstart,1,rows);CHKERRQ(ierr);}
6536 if (cols) {ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,cols);CHKERRQ(ierr);}
6537 }
6538 PetscFunctionReturn(0);
6539}
6540
6541/*@C
6542 MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix.
6543 Uses levels of fill only, not drop tolerance. Use MatLUFactorNumeric()
6544 to complete the factorization.
6545
6546 Collective on Mat
6547
6548 Input Parameters:
6549+ mat - the matrix
6550. row - row permutation
6551. column - column permutation
6552- info - structure containing
6553$ levels - number of levels of fill.
6554$ expected fill - as ratio of original fill.
6555$ 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
6556 missing diagonal entries)
6557
6558 Output Parameter:
6559. fact - new matrix that has been symbolically factored
6560
6561 Notes:
6562 See Users-Manual: ch_mat for additional information about choosing the fill factor for better efficiency.
6563
6564 Most users should employ the simplified KSP interface for linear solvers
6565 instead of working directly with matrix algebra routines such as this.
6566 See, e.g., KSPCreate().
6567
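   Example of Usage: a minimal sketch using the built-in PETSc solver and natural ordering; the fill parameters are illustrative:
.vb
   Mat           fact;
   IS            row,col;
   MatFactorInfo info;
   MatGetOrdering(mat,MATORDERINGNATURAL,&row,&col);
   MatFactorInfoInitialize(&info);
   info.levels = 1;                /* ILU(1) */
   info.fill   = 1.0;              /* expected fill, as a ratio of the original */
   MatGetFactor(mat,MATSOLVERPETSC,MAT_FACTOR_ILU,&fact);
   MatILUFactorSymbolic(fact,mat,row,col,&info);
   MatLUFactorNumeric(fact,mat,&info);
.ve
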
6568 Level: developer
6569
6570.seealso: MatLUFactorSymbolic(), MatLUFactorNumeric(), MatCholeskyFactor(),
6571 MatGetOrdering(), MatFactorInfo
6572
6573 Note: this uses the definition of level of fill as in Y. Saad, 2003
6574
6575 Developer Note: the Fortran interface is not autogenerated as the F90
6576 interface definition cannot be generated correctly [due to MatFactorInfo]
6577
6578 References:
6579 Y. Saad, Iterative Methods for Sparse Linear Systems. Philadelphia: Society for Industrial and Applied Mathematics, 2003.
6580@*/
6581PetscErrorCode MatILUFactorSymbolic(Mat fact,Mat mat,IS row,IS col,const MatFactorInfo *info)
6582{
6583 PetscErrorCode ierr;
6584
6585 PetscFunctionBegin;
6586 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6587 PetscValidType(mat,1);
6588 PetscValidHeaderSpecific(row,IS_CLASSID,2);
6589 PetscValidHeaderSpecific(col,IS_CLASSID,3);
6590 PetscValidPointer(info,4);
6591 PetscValidPointer(fact,5);
6592 if (info->levels < 0) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Levels of fill negative %D",(PetscInt)info->levels);
6593 if (info->fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Expected fill less than 1.0 %g",(double)info->fill);
6594 if (!(fact)->ops->ilufactorsymbolic) {
6595 MatSolverType spackage;
6596 ierr = MatFactorGetSolverType(fact,&spackage);CHKERRQ(ierr);
6597 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s does not support symbolic ILU using solver package %s",((PetscObject)mat)->type_name,spackage);
6598 }
6599 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6600 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6601 MatCheckPreallocated(mat,2);
6602
6603 ierr = PetscLogEventBegin(MAT_ILUFactorSymbolic,mat,row,col,0);CHKERRQ(ierr);
6604 ierr = (fact->ops->ilufactorsymbolic)(fact,mat,row,col,info);CHKERRQ(ierr);
6605 ierr = PetscLogEventEnd(MAT_ILUFactorSymbolic,mat,row,col,0);CHKERRQ(ierr);
6606 PetscFunctionReturn(0);
6607}
6608
6609/*@C
6610 MatICCFactorSymbolic - Performs symbolic incomplete
6611 Cholesky factorization for a symmetric matrix. Use
6612 MatCholeskyFactorNumeric() to complete the factorization.
6613
6614 Collective on Mat
6615
6616 Input Parameters:
6617+ mat - the matrix
6618. perm - row and column permutation
6619- info - structure containing
6620$ levels - number of levels of fill.
6621$ expected fill - as ratio of original fill.
6622
6623 Output Parameter:
6624. fact - the factored matrix
6625
6626 Notes:
6627 Most users should employ the KSP interface for linear solvers
6628 instead of working directly with matrix algebra routines such as this.
6629 See, e.g., KSPCreate().
6630
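 Example of Usage (a minimal sketch for illustration, not taken from the PETSc manual; assumes A is an assembled symmetric SeqAIJ matrix, and error checking with CHKERRQ() is omitted):
.vb
 Mat           fact;
 IS            perm,cperm;
 MatFactorInfo info;
 MatGetFactor(A,MATSOLVERPETSC,MAT_FACTOR_ICC,&fact);
 MatGetOrdering(A,MATORDERINGNATURAL,&perm,&cperm);
 MatFactorInfoInitialize(&info);
 info.levels = 0;    /* ICC(0): no fill beyond the original nonzero pattern */
 info.fill   = 1.0;  /* expected fill as a ratio of the original fill */
 MatICCFactorSymbolic(fact,A,perm,&info);
 MatCholeskyFactorNumeric(fact,A,&info);
 ISDestroy(&perm);ISDestroy(&cperm);
.ve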
6631 Level: developer
6632
6633.seealso: MatCholeskyFactorNumeric(), MatCholeskyFactor(), MatFactorInfo
6634
6635 Note: this uses the definition of level of fill as in Y. Saad, 2003
6636
6637 Developer Note: the Fortran interface is not autogenerated as the f90
6638 interface definition cannot be generated correctly [due to MatFactorInfo]
6639
6640 References:
6641 Y. Saad, Iterative Methods for Sparse Linear Systems. Philadelphia: Society for Industrial and Applied Mathematics, 2003.
6642@*/
6643PetscErrorCode MatICCFactorSymbolic(Mat fact,Mat mat,IS perm,const MatFactorInfo *info)
6644{
6645 PetscErrorCode ierr;
6646
6647 PetscFunctionBegin;
6648 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6649 PetscValidType(mat,1);
6650 PetscValidHeaderSpecific(perm,IS_CLASSID,2);
6651 PetscValidPointer(info,3);
6652 PetscValidPointer(fact,4);
6653 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6654 if (info->levels < 0) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Levels negative %D",(PetscInt) info->levels);
6655 if (info->fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Expected fill greater than or equal to 1.0, got %g",(double)info->fill);
6656 if (!(fact)->ops->iccfactorsymbolic) {
6657 MatSolverType spackage;
6658 ierr = MatFactorGetSolverType(fact,&spackage);CHKERRQ(ierr);
6659 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s does not support symbolic ICC using solver package %s",((PetscObject)mat)->type_name,spackage);
6660 }
6661 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6662 MatCheckPreallocated(mat,2);
6663
6664 ierr = PetscLogEventBegin(MAT_ICCFactorSymbolic,mat,perm,0,0);CHKERRQ(ierr);
6665 ierr = (fact->ops->iccfactorsymbolic)(fact,mat,perm,info);CHKERRQ(ierr);
6666 ierr = PetscLogEventEnd(MAT_ICCFactorSymbolic,mat,perm,0,0);CHKERRQ(ierr);
6667 PetscFunctionReturn(0);
6668}
6669
6670/*@C
6671 MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
6672 points to an array of valid matrices, they may be reused to store the new
6673 submatrices.
6674
6675 Collective on Mat
6676
6677 Input Parameters:
6678+ mat - the matrix
6679. n - the number of submatrices to be extracted (on this processor, may be zero)
6680. irow, icol - index sets of rows and columns to extract
6681- scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
6682
6683 Output Parameter:
6684. submat - the array of submatrices
6685
6686 Notes:
6687 MatCreateSubMatrices() can extract ONLY sequential submatrices
6688 (from both sequential and parallel matrices). Use MatCreateSubMatrix()
6689 to extract a parallel submatrix.
6690
6691 Some matrix types place restrictions on the row and column
6692 indices, such as that they be sorted or that they be equal to each other.
6693
6694 The index sets may not have duplicate entries.
6695
6696 When extracting submatrices from a parallel matrix, each processor can
6697 form a different submatrix by setting the rows and columns of its
6698 individual index sets according to the local submatrix desired.
6699
6700 When finished using the submatrices, the user should destroy
6701 them with MatDestroySubMatrices().
6702
6703 MAT_REUSE_MATRIX can only be used when the nonzero structure of the
6704 original matrix has not changed since the last call to MatCreateSubMatrices().
6705
6706 This routine creates the matrices in submat; you should NOT create them before
6707 calling it. It also allocates the array of matrix pointers submat.
6708
6709 For BAIJ matrices the index sets must respect the block structure, that is if they
6710 request one row/column in a block, they must request all rows/columns that are in
6711 that block. For example, if the block size is 2 you cannot request just row 0 and
6712 column 0.
6713
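 Example of Usage (a minimal sketch for illustration, not taken from the PETSc manual; assumes A is an assembled matrix with at least five rows and columns, and error checking is omitted):
.vb
 IS  is;
 Mat *submat;
 ISCreateStride(PETSC_COMM_SELF,5,0,1,&is);                    /* rows/columns 0..4 */
 MatCreateSubMatrices(A,1,&is,&is,MAT_INITIAL_MATRIX,&submat);
 /* ... work with the sequential matrix submat[0] ... */
 MatDestroySubMatrices(1,&submat);
 ISDestroy(&is);
.ve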
6714 Fortran Note:
6715 The Fortran interface is slightly different from that given below; it
6716 requires one to pass in as submat a Mat (integer) array of size at least n+1.
6717
6718 Level: advanced
6719
6720
6721.seealso: MatDestroySubMatrices(), MatCreateSubMatrix(), MatGetRow(), MatGetDiagonal(), MatReuse
6722@*/
6723PetscErrorCode MatCreateSubMatrices(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
6724{
6725 PetscErrorCode ierr;
6726 PetscInt i;
6727 PetscBool eq;
6728
6729 PetscFunctionBegin;
6730 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6731 PetscValidType(mat,1);
6732 if (n) {
6733 PetscValidPointer(irow,3);
6734 PetscValidHeaderSpecific(*irow,IS_CLASSID,3);
6735 PetscValidPointer(icol,4);
6736 PetscValidHeaderSpecific(*icol,IS_CLASSID,4);
6737 }
6738 PetscValidPointer(submat,6);
6739 if (n && scall == MAT_REUSE_MATRIX) {
6740 PetscValidPointer(*submat,6);
6741 PetscValidHeaderSpecific(**submat,MAT_CLASSID,6);
6742 }
6743 if (!mat->ops->createsubmatrices) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
6744 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6745 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6746 MatCheckPreallocated(mat,1);
6747
6748 ierr = PetscLogEventBegin(MAT_CreateSubMats,mat,0,0,0);CHKERRQ(ierr);
6749 ierr = (*mat->ops->createsubmatrices)(mat,n,irow,icol,scall,submat);CHKERRQ(ierr);
6750 ierr = PetscLogEventEnd(MAT_CreateSubMats,mat,0,0,0);CHKERRQ(ierr);
6751 for (i=0; i<n; i++) {
6752 (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
6753 if (mat->symmetric || mat->structurally_symmetric || mat->hermitian) {
6754 ierr = ISEqual(irow[i],icol[i],&eq);CHKERRQ(ierr);
6755 if (eq) {
6756 if (mat->symmetric) {
6757 ierr = MatSetOption((*submat)[i],MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
6758 } else if (mat->hermitian) {
6759 ierr = MatSetOption((*submat)[i],MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
6760 } else if (mat->structurally_symmetric) {
6761 ierr = MatSetOption((*submat)[i],MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
6762 }
6763 }
6764 }
6765 }
6766 PetscFunctionReturn(0);
6767}
6768
6769/*@C
6770 MatCreateSubMatricesMPI - Extracts MPI submatrices across a subcommunicator of mat (by pairs of index sets that may live on subcommunicators).
6771
6772 Collective on Mat
6773
6774 Input Parameters:
6775+ mat - the matrix
6776. n - the number of submatrices to be extracted
6777. irow, icol - index sets of rows and columns to extract
6778- scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
6779
6780 Output Parameter:
6781. submat - the array of submatrices
6782
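 Example of Usage (a minimal sketch for illustration, not taken from the PETSc manual; assumes A is an assembled parallel matrix whose global size is at least 5*size in each dimension, and error checking is omitted):
.vb
 PetscMPIInt rank;
 IS          is;
 Mat         *submat;
 MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
 /* the per-rank pieces together select one parallel submatrix */
 ISCreateStride(PETSC_COMM_WORLD,5,5*rank,1,&is);
 MatCreateSubMatricesMPI(A,1,&is,&is,MAT_INITIAL_MATRIX,&submat);
 MatDestroySubMatrices(1,&submat);
 ISDestroy(&is);
.ve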
6783 Level: advanced
6784
6785
6786.seealso: MatCreateSubMatrices(), MatCreateSubMatrix(), MatGetRow(), MatGetDiagonal(), MatReuse
6787@*/
6788PetscErrorCode MatCreateSubMatricesMPI(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
6789{
6790 PetscErrorCode ierr;
6791 PetscInt i;
6792 PetscBool eq;
6793
6794 PetscFunctionBegin;
6795 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6796 PetscValidType(mat,1);
6797 if (n) {
6798 PetscValidPointer(irow,3);
6799 PetscValidHeaderSpecific(*irow,IS_CLASSID,3);
6800 PetscValidPointer(icol,4);
6801 PetscValidHeaderSpecific(*icol,IS_CLASSID,4);
6802 }
6803 PetscValidPointer(submat,6);
6804 if (n && scall == MAT_REUSE_MATRIX) {
6805 PetscValidPointer(*submat,6);
6806 PetscValidHeaderSpecific(**submat,MAT_CLASSID,6);
6807 }
6808 if (!mat->ops->createsubmatricesmpi) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
6809 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6810 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6811 MatCheckPreallocated(mat,1);
6812
6813 ierr = PetscLogEventBegin(MAT_CreateSubMats,mat,0,0,0);CHKERRQ(ierr);
6814 ierr = (*mat->ops->createsubmatricesmpi)(mat,n,irow,icol,scall,submat);CHKERRQ(ierr);
6815 ierr = PetscLogEventEnd(MAT_CreateSubMats,mat,0,0,0);CHKERRQ(ierr);
6816 for (i=0; i<n; i++) {
6817 if (mat->symmetric || mat->structurally_symmetric || mat->hermitian) {
6818 ierr = ISEqual(irow[i],icol[i],&eq);CHKERRQ(ierr);
6819 if (eq) {
6820 if (mat->symmetric) {
6821 ierr = MatSetOption((*submat)[i],MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
6822 } else if (mat->hermitian) {
6823 ierr = MatSetOption((*submat)[i],MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
6824 } else if (mat->structurally_symmetric) {
6825 ierr = MatSetOption((*submat)[i],MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
6826 }
6827 }
6828 }
6829 }
6830 PetscFunctionReturn(0);
6831}
6832
6833/*@C
6834 MatDestroyMatrices - Destroys an array of matrices.
6835
6836 Collective on Mat
6837
6838 Input Parameters:
6839+ n - the number of local matrices
6840- mat - the matrices (note that this is a pointer to the array of matrices)
6841
6842 Level: advanced
6843
6844 Notes:
6845 Frees not only the matrices, but also the array that contains them.
6846 In Fortran this routine will not free the array.
6847
6848.seealso: MatCreateSubMatrices(), MatDestroySubMatrices()
6849@*/
6850PetscErrorCode MatDestroyMatrices(PetscInt n,Mat *mat[])
6851{
6852 PetscErrorCode ierr;
6853 PetscInt i;
6854
6855 PetscFunctionBegin;
6856 if (!*mat) PetscFunctionReturn(0);
6857 if (n < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to destroy negative number of matrices %D",n);
6858 PetscValidPointer(mat,2);
6859
6860 for (i=0; i<n; i++) {
6861 ierr = MatDestroy(&(*mat)[i]);CHKERRQ(ierr);
6862 }
6863
6864 /* memory is allocated even if n = 0 */
6865 ierr = PetscFree(*mat);CHKERRQ(ierr);
6866 PetscFunctionReturn(0);
6867}
6868
6869/*@C
6870 MatDestroySubMatrices - Destroys a set of matrices obtained with MatCreateSubMatrices().
6871
6872 Collective on Mat
6873
6874 Input Parameters:
6875+ n - the number of local matrices
6876- mat - the matrices (note that this is a pointer to the array of matrices, just to match the calling
6877 sequence of MatCreateSubMatrices())
6878
6879 Level: advanced
6880
6881 Notes:
6882 Frees not only the matrices, but also the array that contains them.
6883 In Fortran this routine will not free the array.
6884
6885.seealso: MatCreateSubMatrices()
6886@*/
6887PetscErrorCode MatDestroySubMatrices(PetscInt n,Mat *mat[])
6888{
6889 PetscErrorCode ierr;
6890 Mat mat0;
6891
6892 PetscFunctionBegin;
6893 if (!*mat) PetscFunctionReturn(0);
6894 /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
6895 if (n < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to destroy negative number of matrices %D",n);
6896 PetscValidPointer(mat,2);
6897
6898 mat0 = (*mat)[0];
6899 if (mat0 && mat0->ops->destroysubmatrices) {
6900 ierr = (mat0->ops->destroysubmatrices)(n,mat);CHKERRQ(ierr);
6901 } else {
6902 ierr = MatDestroyMatrices(n,mat);CHKERRQ(ierr);
6903 }
6904 PetscFunctionReturn(0);
6905}
6906
6907/*@C
6908 MatGetSeqNonzeroStructure - Extracts the sequential nonzero structure from a matrix.
6909
6910 Collective on Mat
6911
6912 Input Parameter:
6913. mat - the matrix
6914
6915 Output Parameter:
6916. matstruct - the sequential matrix with the nonzero structure of mat
6917
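 Example of Usage (a minimal sketch for illustration; assumes mat is an assembled matrix and error checking is omitted):
.vb
 Mat struc;
 MatGetSeqNonzeroStructure(mat,&struc);
 /* ... examine the sequential nonzero structure ... */
 MatDestroySeqNonzeroStructure(&struc);
.ve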
6918 Level: intermediate
6919
6920.seealso: MatDestroySeqNonzeroStructure(), MatCreateSubMatrices(), MatDestroyMatrices()
6921@*/
6922PetscErrorCode MatGetSeqNonzeroStructure(Mat mat,Mat *matstruct)
6923{
6924 PetscErrorCode ierr;
6925
6926 PetscFunctionBegin;
6927 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6928 PetscValidPointer(matstruct,2);
6929
6930 PetscValidType(mat,1);
6931 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6932 MatCheckPreallocated(mat,1);
6933
6934 if (!mat->ops->getseqnonzerostructure) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Not for matrix type %s",((PetscObject)mat)->type_name);
6935 ierr = PetscLogEventBegin(MAT_GetSeqNonzeroStructure,mat,0,0,0);CHKERRQ(ierr);
6936 ierr = (*mat->ops->getseqnonzerostructure)(mat,matstruct);CHKERRQ(ierr);
6937 ierr = PetscLogEventEnd(MAT_GetSeqNonzeroStructure,mat,0,0,0);CHKERRQ(ierr);
6938 PetscFunctionReturn(0);
6939}
6940
6941/*@C
6942 MatDestroySeqNonzeroStructure - Destroys matrix obtained with MatGetSeqNonzeroStructure().
6943
6944 Collective on Mat
6945
6946 Input Parameter:
6947. mat - the matrix (note that this is a pointer to the matrix, just to match the calling
6948 sequence of MatGetSeqNonzeroStructure())
6949
6950 Level: advanced
6951
6952 Notes:
6953 Frees the matrix and sets the pointer to NULL
6954
6955.seealso: MatGetSeqNonzeroStructure()
6956@*/
6957PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
6958{
6959 PetscErrorCode ierr;
6960
6961 PetscFunctionBegin;
6962 PetscValidPointer(mat,1);
6963 ierr = MatDestroy(mat);CHKERRQ(ierr);
6964 PetscFunctionReturn(0);
6965}
6966
6967/*@
6968 MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
6969 replaces the index sets by larger ones that represent submatrices with
6970 additional overlap.
6971
6972 Collective on Mat
6973
6974 Input Parameters:
6975+ mat - the matrix
6976. n - the number of index sets
6977. is - the array of index sets (these index sets will be changed during the call)
6978- ov - the additional overlap requested
6979
6980 Options Database:
6981. -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrices)
6982
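 Example of Usage (a minimal sketch of the typical domain-decomposition pattern, as in the additive Schwarz method; assumes mat is an assembled parallel matrix, is[] holds n subdomain index sets, and error checking is omitted):
.vb
 MatIncreaseOverlap(mat,n,is,1);                                /* grow each subdomain by one layer */
 MatCreateSubMatrices(mat,n,is,is,MAT_INITIAL_MATRIX,&submat);  /* extract the overlapping local problems */
.ve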
6983 Level: developer
6984
6985
6986.seealso: MatCreateSubMatrices()
6987@*/
6988PetscErrorCode MatIncreaseOverlap(Mat mat,PetscInt n,IS is[],PetscInt ov)
6989{
6990 PetscErrorCode ierr;
6991
6992 PetscFunctionBegin;
6993 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6994 PetscValidType(mat,1);
6995 if (n < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Number of domains cannot be negative, you have %D",n);
6996 if (n) {
6997 PetscValidPointer(is,3);
6998 PetscValidHeaderSpecific(*is,IS_CLASSID,3);
6999 }
7000 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),7000,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
7001 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),7001,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
7002 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7002,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7003
7004 if (!ov) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7005 if (!mat->ops->increaseoverlap) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),7005,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
7006 ierr = PetscLogEventBegin(MAT_IncreaseOverlap,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_IncreaseOverlap].active) ? (*PetscLogPLB)((MAT_IncreaseOverlap
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7006,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7007 ierr = (*mat->ops->increaseoverlap)(mat,n,is,ov);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7007,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7008 ierr = PetscLogEventEnd(MAT_IncreaseOverlap,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_IncreaseOverlap].active) ? (*PetscLogPLE)((MAT_IncreaseOverlap
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7008,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7009 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7010}
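
/*
   Editor's note: a minimal usage sketch, not part of matrix.c. It grows a
   single local index set by one layer of overlap, as is done when building
   additive Schwarz subdomains; the assembled matrix A is assumed to come
   from the caller.
*/
static PetscErrorCode ExampleIncreaseOverlap(Mat A)
{
  IS             rows;
  PetscErrorCode ierr;

  ierr = ISCreateStride(PETSC_COMM_SELF,10,0,1,&rows);CHKERRQ(ierr); /* rows 0..9 of this process */
  ierr = MatIncreaseOverlap(A,1,&rows,1);CHKERRQ(ierr);              /* enlarge by one layer of overlap */
  ierr = ISDestroy(&rows);CHKERRQ(ierr);
  return 0;
}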
7011
7012
7013PetscErrorCode MatIncreaseOverlapSplit_Single(Mat,IS*,PetscInt);
7014
7015/*@
7016 MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7017 a sub-communicator, replaces the index sets with larger ones that represent submatrices with
7018 additional overlap.
7019
7020 Collective on Mat
7021
7022 Input Parameters:
7023+ mat - the matrix
7024. n - the number of index sets
7025. is - the array of index sets (these index sets will be changed during the call)
7026- ov - the additional overlap requested
7027
7028 Options Database:
7029. -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrices)
7030
7031 Level: developer
7032
7033
7034.seealso: MatCreateSubMatrices()
7035@*/
7036PetscErrorCode MatIncreaseOverlapSplit(Mat mat,PetscInt n,IS is[],PetscInt ov)
7037{
7038 PetscInt i;
7039 PetscErrorCode ierr;
7040
7041 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7041; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7042 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7042,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7042,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7042,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7042,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7043 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7043,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7044 if (n < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have one or more domains, you have %D",n)return PetscError(((MPI_Comm)0x44000001),7044,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,63,PETSC_ERROR_INITIAL,"Must have one or more domains, you have %D"
,n)
;
7045 if (n) {
7046 PetscValidPointer(is,3)do { if (!is) return PetscError(((MPI_Comm)0x44000001),7046,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",3); if (!PetscCheckPointer(is
,PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),7046,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",3); } while (0)
;
7047 PetscValidHeaderSpecific(*is,IS_CLASSID,3)do { if (!*is) return PetscError(((MPI_Comm)0x44000001),7047,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(*is,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7047,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(*is))->classid != IS_CLASSID) { if
(((PetscObject)(*is))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7047,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),7047,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
7048 }
7049 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),7049,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
7050 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),7050,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
7051 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7051,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7052 if (!ov) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7053 ierr = PetscLogEventBegin(MAT_IncreaseOverlap,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_IncreaseOverlap].active) ? (*PetscLogPLB)((MAT_IncreaseOverlap
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7053,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7054 for (i=0; i<n; i++) {
7055 ierr = MatIncreaseOverlapSplit_Single(mat,&is[i],ov);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7055,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7056 }
7057 ierr = PetscLogEventEnd(MAT_IncreaseOverlap,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_IncreaseOverlap].active) ? (*PetscLogPLE)((MAT_IncreaseOverlap
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7057,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7058 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7059}
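
/*
   Editor's note: an illustrative sketch, not part of matrix.c. It differs from
   the MatIncreaseOverlap example only in that each IS is expected to live on a
   sub-communicator of the matrix, which is why the loop above hands the sets to
   MatIncreaseOverlapSplit_Single() one at a time.
*/
static PetscErrorCode ExampleIncreaseOverlapSplit(Mat A,IS *subdomain)
{
  PetscErrorCode ierr;

  ierr = MatIncreaseOverlapSplit(A,1,subdomain,1);CHKERRQ(ierr); /* grow *subdomain in place */
  return 0;
}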
7060
7061
7062
7063
7064/*@
7065 MatGetBlockSize - Returns the matrix block size.
7066
7067 Not Collective
7068
7069 Input Parameter:
7070. mat - the matrix
7071
7072 Output Parameter:
7073. bs - block size
7074
7075 Notes:
7076 Block row formats are MATSEQBAIJ, MATMPIBAIJ, MATSEQSBAIJ, MATMPISBAIJ. These formats ALWAYS have square block storage in the matrix.
7077
7078 If the block size has not been set yet this routine returns 1.
7079
7080 Level: intermediate
7081
7082.seealso: MatCreateSeqBAIJ(), MatCreateBAIJ(), MatGetBlockSizes()
7083@*/
7084PetscErrorCode MatGetBlockSize(Mat mat,PetscInt *bs)
7085{
7086 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7086; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7087 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7087,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7087,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7087,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7087,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7088 PetscValidIntPointer(bs,2)do { if (!bs) return PetscError(((MPI_Comm)0x44000001),7088,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",2); if (!PetscCheckPointer(bs
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7088,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to Int: Parameter # %d",2); } while (0)
;
7089 *bs = PetscAbs(mat->rmap->bs)(((mat->rmap->bs) >= 0) ? (mat->rmap->bs) : (-
(mat->rmap->bs)))
;
7090 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7091}
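
/*
   Editor's note: a short sketch, not part of matrix.c, showing a typical query;
   per the notes above, bs is 1 unless a block size has been set.
*/
static PetscErrorCode ExampleQueryBlockSize(Mat A)
{
  PetscInt       bs;
  PetscErrorCode ierr;

  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_SELF,"block size %D\n",bs);CHKERRQ(ierr);
  return 0;
}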
7092
7093/*@
7094 MatGetBlockSizes - Returns the matrix block row and column sizes.
7095
7096 Not Collective
7097
7098 Input Parameter:
7099. mat - the matrix
7100
7101 Output Parameters:
7102+ rbs - row block size
7103- cbs - column block size
7104
7105 Notes:
7106 Block row formats are MATSEQBAIJ, MATMPIBAIJ, MATSEQSBAIJ, MATMPISBAIJ. These formats ALWAYS have square block storage in the matrix.
7107 If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7108
7109 If a block size has not been set yet this routine returns 1.
7110
7111 Level: intermediate
7112
7113.seealso: MatCreateSeqBAIJ(), MatCreateBAIJ(), MatGetBlockSize(), MatSetBlockSize(), MatSetBlockSizes()
7114@*/
7115PetscErrorCode MatGetBlockSizes(Mat mat,PetscInt *rbs, PetscInt *cbs)
7116{
7117 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7117; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7118 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7118,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7118,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7118,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7118,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7119 if (rbs) PetscValidIntPointer(rbs,2)do { if (!rbs) return PetscError(((MPI_Comm)0x44000001),7119,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",2); if
(!PetscCheckPointer(rbs,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),7119,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,2); } while (0)
;
7120 if (cbs) PetscValidIntPointer(cbs,3)do { if (!cbs) return PetscError(((MPI_Comm)0x44000001),7120,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(cbs,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),7120,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,3); } while (0)
;
7121 if (rbs) *rbs = PetscAbs(mat->rmap->bs)(((mat->rmap->bs) >= 0) ? (mat->rmap->bs) : (-
(mat->rmap->bs)))
;
7122 if (cbs) *cbs = PetscAbs(mat->cmap->bs)(((mat->cmap->bs) >= 0) ? (mat->cmap->bs) : (-
(mat->cmap->bs)))
;
7123 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7124}
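
/*
   Editor's note: sketch, not part of matrix.c. Because the validation above is
   conditional, either output may be NULL when only one block size is needed.
*/
static PetscErrorCode ExampleQueryBlockSizes(Mat A)
{
  PetscInt       rbs,cbs;
  PetscErrorCode ierr;

  ierr = MatGetBlockSizes(A,&rbs,&cbs);CHKERRQ(ierr); /* both row and column block sizes */
  ierr = MatGetBlockSizes(A,&rbs,NULL);CHKERRQ(ierr); /* row block size only */
  return 0;
}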
7125
7126/*@
7127 MatSetBlockSize - Sets the matrix block size.
7128
7129 Logically Collective on Mat
7130
7131 Input Parameters:
7132+ mat - the matrix
7133- bs - block size
7134
7135 Notes:
7136 Block row formats are MATSEQBAIJ, MATMPIBAIJ, MATSEQSBAIJ, MATMPISBAIJ. These formats ALWAYS have square block storage in the matrix.
7137 This must be called before MatSetUp() or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7138
7139 For MATMPIAIJ and MATSEQAIJ matrix formats, this function can be called at a later stage, provided that the specified block size
7140 is compatible with the matrix local sizes.
7141
7142 Level: intermediate
7143
7144.seealso: MatCreateSeqBAIJ(), MatCreateBAIJ(), MatGetBlockSize(), MatSetBlockSizes(), MatGetBlockSizes()
7145@*/
7146PetscErrorCode MatSetBlockSize(Mat mat,PetscInt bs)
7147{
7148 PetscErrorCode ierr;
7149
7150 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7150; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7151 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7151,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7151,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7151,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7151,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7152 PetscValidLogicalCollectiveInt(mat,bs,2)do { PetscErrorCode _7_ierr; PetscInt b1[2],b2[2]; b1[0] = -bs
; b1[1] = bs; _7_ierr = (PetscAllreduceBarrierCheck(PetscObjectComm
((PetscObject)mat),2,7152,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)mat))),0) || MPI_Allreduce((b1),(b2),(2),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000001)),(PetscObjectComm((PetscObject
)mat)))));do {if (__builtin_expect(!!(_7_ierr),0)) return PetscError
(((MPI_Comm)0x44000001),7152,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (-b2[0] != b2
[1]) return PetscError(PetscObjectComm((PetscObject)mat),7152
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,62,PETSC_ERROR_INITIAL,"Int value must be same on all processes, argument # %d"
,2); } while (0)
;
7153 ierr = MatSetBlockSizes(mat,bs,bs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7153,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7154 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7155}
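
/*
   Editor's note: sketch, not part of matrix.c. Per the notes above, for
   BAIJ-type formats the block size must be set before MatSetUp() or the
   preallocation routines; sizes here are illustrative (bs=4 divides 96).
*/
static PetscErrorCode ExampleCreateBlockedMat(MPI_Comm comm,Mat *A)
{
  PetscErrorCode ierr;

  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,PETSC_DECIDE,PETSC_DECIDE,96,96);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATBAIJ);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*A,4);CHKERRQ(ierr); /* must precede MatSetUp() */
  ierr = MatSetUp(*A);CHKERRQ(ierr);
  return 0;
}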
7156
7157/*@
7158 MatSetVariableBlockSizes - Sets the diagonal blocks of the matrix, which need not be of the same size
7159
7160 Logically Collective on Mat
7161
7162 Input Parameters:
7163+ mat - the matrix
7164. nblocks - the number of blocks on this process
7165- bsizes - the block sizes
7166
7167 Notes:
7168 Currently used by PCVPBJACOBI for SeqAIJ matrices
7169
7170 Level: intermediate
7171
7172.seealso: MatCreateSeqBAIJ(), MatCreateBAIJ(), MatGetBlockSize(), MatSetBlockSizes(), MatGetBlockSizes(), MatGetVariableBlockSizes()
7173@*/
7174PetscErrorCode MatSetVariableBlockSizes(Mat mat,PetscInt nblocks,PetscInt *bsizes)
7175{
7176 PetscErrorCode ierr;
7177 PetscInt i,ncnt = 0, nlocal;
7178
7179 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7179; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7180 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7180,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7180,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7180,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7180,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7181 if (nblocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of local blocks must be greater than or equal to zero")return PetscError(((MPI_Comm)0x44000001),7181,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Number of local blocks must be greater than or equal to zero"
)
;
7182 ierr = MatGetLocalSize(mat,&nlocal,NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7182,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7183 for (i=0; i<nblocks; i++) ncnt += bsizes[i];
7184 if (ncnt != nlocal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local block sizes %D does not equal local size of matrix %D",ncnt,nlocal)return PetscError(((MPI_Comm)0x44000001),7184,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Sum of local block sizes %D does not equal local size of matrix %D"
,ncnt,nlocal)
;
7185 ierr = PetscFree(mat->bsizes)((*PetscTrFree)((void*)(mat->bsizes),7185,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((mat->bsizes) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7185,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7186 mat->nblocks = nblocks;
7187 ierr = PetscMalloc1(nblocks,&mat->bsizes)PetscMallocA(1,PETSC_FALSE,7187,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,(size_t)(nblocks)*sizeof(**(&mat->bsizes)),(&mat->
bsizes))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7187,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7188 ierr = PetscArraycpy(mat->bsizes,bsizes,nblocks)((sizeof(*(mat->bsizes)) != sizeof(*(bsizes))) || PetscMemcpy
(mat->bsizes,bsizes,(nblocks)*sizeof(*(mat->bsizes))));
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7188,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7189 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7190}
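
/*
   Editor's note: sketch, not part of matrix.c. The block sizes must sum to the
   local row count, which the check above enforces; here 2+3+5 assumes 10 local
   rows.
*/
static PetscErrorCode ExampleVariableBlocks(Mat A)
{
  PetscInt       bsizes[3] = {2,3,5};
  PetscErrorCode ierr;

  ierr = MatSetVariableBlockSizes(A,3,bsizes);CHKERRQ(ierr); /* the array is copied into the Mat */
  return 0;
}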
7191
7192/*@C
7193 MatGetVariableBlockSizes - Gets the diagonal blocks of the matrix, which need not be of the same size
7194
7195 Logically Collective on Mat
7196
7197 Input Parameter:
7198. mat - the matrix
7199
7200 Output Parameters:
7201+ nblocks - the number of blocks on this process
7202- bsizes - the block sizes
7203
7204 Notes: Currently not supported from Fortran
7205
7206 Level: intermediate
7207
7208.seealso: MatCreateSeqBAIJ(), MatCreateBAIJ(), MatGetBlockSize(), MatSetBlockSizes(), MatGetBlockSizes(), MatSetVariableBlockSizes()
7209@*/
7210PetscErrorCode MatGetVariableBlockSizes(Mat mat,PetscInt *nblocks,const PetscInt **bsizes)
7211{
7212 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7212; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7213 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7213,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7213,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7213,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7213,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7214 *nblocks = mat->nblocks;
7215 *bsizes = mat->bsizes;
7216 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7217}
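
/*
   Editor's note: sketch, not part of matrix.c. bsizes points at the matrix's
   internal array set by MatSetVariableBlockSizes() and must not be freed by
   the caller.
*/
static PetscErrorCode ExampleGetVariableBlocks(Mat A)
{
  PetscInt        nblocks,i;
  const PetscInt *bsizes;
  PetscErrorCode  ierr;

  ierr = MatGetVariableBlockSizes(A,&nblocks,&bsizes);CHKERRQ(ierr);
  for (i=0; i<nblocks; i++) {
    ierr = PetscPrintf(PETSC_COMM_SELF,"block %D has size %D\n",i,bsizes[i]);CHKERRQ(ierr);
  }
  return 0;
}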
7218
7219/*@
7220 MatSetBlockSizes - Sets the matrix block row and column sizes.
7221
7222 Logically Collective on Mat
7223
7224 Input Parameters:
7225+ mat - the matrix
7226. rbs - row block size
7227- cbs - column block size
7228
7229 Notes:
7230 Block row formats are MATSEQBAIJ, MATMPIBAIJ, MATSEQSBAIJ, MATMPISBAIJ. These formats ALWAYS have square block storage in the matrix.
7231 If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7232 This must be called before MatSetUp() or MatXXXSetPreallocation() (or will default to 1) and the block sizes cannot be changed later.
7233
7234 For MATMPIAIJ and MATSEQAIJ matrix formats, this function can be called at a later stage, provided that the specified block sizes
7235 are compatible with the matrix local sizes.
7236
7237 The row and column block sizes determine the block sizes of the "row" and "column" vectors returned by MatCreateVecs().
7238
7239 Level: intermediate
7240
7241.seealso: MatCreateSeqBAIJ(), MatCreateBAIJ(), MatGetBlockSize(), MatSetBlockSize(), MatGetBlockSizes()
7242@*/
7243PetscErrorCode MatSetBlockSizes(Mat mat,PetscInt rbs,PetscInt cbs)
7244{
7245 PetscErrorCode ierr;
7246
7247 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7247; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7248 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7248,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7248,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7248,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7248,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7249 PetscValidLogicalCollectiveInt(mat,rbs,2)do { PetscErrorCode _7_ierr; PetscInt b1[2],b2[2]; b1[0] = -rbs
; b1[1] = rbs; _7_ierr = (PetscAllreduceBarrierCheck(PetscObjectComm
((PetscObject)mat),2,7249,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)mat))),0) || MPI_Allreduce((b1),(b2),(2),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000001)),(PetscObjectComm((PetscObject
)mat)))));do {if (__builtin_expect(!!(_7_ierr),0)) return PetscError
(((MPI_Comm)0x44000001),7249,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (-b2[0] != b2
[1]) return PetscError(PetscObjectComm((PetscObject)mat),7249
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,62,PETSC_ERROR_INITIAL,"Int value must be same on all processes, argument # %d"
,2); } while (0)
;
7250 PetscValidLogicalCollectiveInt(mat,cbs,3)do { PetscErrorCode _7_ierr; PetscInt b1[2],b2[2]; b1[0] = -cbs
; b1[1] = cbs; _7_ierr = (PetscAllreduceBarrierCheck(PetscObjectComm
((PetscObject)mat),2,7250,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)mat))),0) || MPI_Allreduce((b1),(b2),(2),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000001)),(PetscObjectComm((PetscObject
)mat)))));do {if (__builtin_expect(!!(_7_ierr),0)) return PetscError
(((MPI_Comm)0x44000001),7250,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (-b2[0] != b2
[1]) return PetscError(PetscObjectComm((PetscObject)mat),7250
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,62,PETSC_ERROR_INITIAL,"Int value must be same on all processes, argument # %d"
,3); } while (0)
;
7251 if (mat->ops->setblocksizes) {
7252 ierr = (*mat->ops->setblocksizes)(mat,rbs,cbs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7252,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7253 }
7254 if (mat->rmap->refcnt) {
7255 ISLocalToGlobalMapping l2g = NULL((void*)0);
7256 PetscLayout nmap = NULL((void*)0);
7257
7258 ierr = PetscLayoutDuplicate(mat->rmap,&nmap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7258,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7259 if (mat->rmap->mapping) {
7260 ierr = ISLocalToGlobalMappingDuplicate(mat->rmap->mapping,&l2g);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7260,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7261 }
7262 ierr = PetscLayoutDestroy(&mat->rmap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7262,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7263 mat->rmap = nmap;
7264 mat->rmap->mapping = l2g;
7265 }
7266 if (mat->cmap->refcnt) {
7267 ISLocalToGlobalMapping l2g = NULL((void*)0);
7268 PetscLayout nmap = NULL((void*)0);
7269
7270 ierr = PetscLayoutDuplicate(mat->cmap,&nmap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7270,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7271 if (mat->cmap->mapping) {
7272 ierr = ISLocalToGlobalMappingDuplicate(mat->cmap->mapping,&l2g);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7272,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7273 }
7274 ierr = PetscLayoutDestroy(&mat->cmap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7274,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7275 mat->cmap = nmap;
7276 mat->cmap->mapping = l2g;
7277 }
7278 ierr = PetscLayoutSetBlockSize(mat->rmap,rbs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7278,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7279 ierr = PetscLayoutSetBlockSize(mat->cmap,cbs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7279,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7280 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7281}
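
/*
   Editor's note: sketch, not part of matrix.c, for a rectangular operator whose
   row and column spaces carry different block sizes; as noted above, vectors
   from MatCreateVecs() then inherit those block sizes. The sizes are
   illustrative (e.g. 3 dofs per fine node, scalar coarse space).
*/
static PetscErrorCode ExampleRectangularBlockSizes(Mat P)
{
  Vec            right,left;
  PetscErrorCode ierr;

  ierr = MatSetBlockSizes(P,3,1);CHKERRQ(ierr);
  ierr = MatCreateVecs(P,&right,&left);CHKERRQ(ierr); /* right has bs 1, left has bs 3 */
  ierr = VecDestroy(&right);CHKERRQ(ierr);
  ierr = VecDestroy(&left);CHKERRQ(ierr);
  return 0;
}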
7282
7283/*@
7284 MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
7285
7286 Logically Collective on Mat
7287
7288 Input Parameters:
7289+ mat - the matrix
7290. fromRow - matrix from which to copy row block size
7291- fromCol - matrix from which to copy column block size (can be same as fromRow)
7292
7293 Level: developer
7294
7295.seealso: MatCreateSeqBAIJ(), MatCreateBAIJ(), MatGetBlockSize(), MatSetBlockSizes()
7296@*/
7297PetscErrorCode MatSetBlockSizesFromMats(Mat mat,Mat fromRow,Mat fromCol)
7298{
7299 PetscErrorCode ierr;
7300
7301 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7301; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7302 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7302,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7302,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7302,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7302,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7303 PetscValidHeaderSpecific(fromRow,MAT_CLASSID,2)do { if (!fromRow) return PetscError(((MPI_Comm)0x44000001),7303
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",2); if (
!PetscCheckPointer(fromRow,PETSC_OBJECT)) return PetscError((
(MPI_Comm)0x44000001),7303,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(fromRow))->classid != MAT_CLASSID)
{ if (((PetscObject)(fromRow))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),7303,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),7303,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
7304 PetscValidHeaderSpecific(fromCol,MAT_CLASSID,3)do { if (!fromCol) return PetscError(((MPI_Comm)0x44000001),7304
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(fromCol,PETSC_OBJECT)) return PetscError((
(MPI_Comm)0x44000001),7304,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(fromCol))->classid != MAT_CLASSID)
{ if (((PetscObject)(fromCol))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),7304,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),7304,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
7305 if (fromRow->rmap->bs > 0) {ierr = PetscLayoutSetBlockSize(mat->rmap,fromRow->rmap->bs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7305,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
7306 if (fromCol->cmap->bs > 0) {ierr = PetscLayoutSetBlockSize(mat->cmap,fromCol->cmap->bs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7306,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
7307 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7308}
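
/*
   Editor's note: sketch, not part of matrix.c. A product-like matrix C with the
   row layout of A and the column layout of B can inherit both block sizes in
   one call.
*/
static PetscErrorCode ExampleInheritBlockSizes(Mat C,Mat A,Mat B)
{
  PetscErrorCode ierr;

  ierr = MatSetBlockSizesFromMats(C,A,B);CHKERRQ(ierr); /* row bs from A, column bs from B */
  return 0;
}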
7309
7310/*@
7311 MatResidual - Default routine to calculate the residual.
7312
7313 Collective on Mat
7314
7315 Input Parameters:
7316+ mat - the matrix
7317. b - the right-hand side
7318- x - the approximate solution
7319
7320 Output Parameter:
7321. r - location to store the residual
7322
7323 Level: developer
7324
7325.seealso: PCMGSetResidual()
7326@*/
7327PetscErrorCode MatResidual(Mat mat,Vec b,Vec x,Vec r)
7328{
7329 PetscErrorCode ierr;
7330
7331 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7331; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7332 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7332,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7332,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7332,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7332,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7333 PetscValidHeaderSpecific(b,VEC_CLASSID,2)do { if (!b) return PetscError(((MPI_Comm)0x44000001),7333,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(b,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),7333,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(b))->classid != VEC_CLASSID) { if (
((PetscObject)(b))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),7333,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),7333,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
7334 PetscValidHeaderSpecific(x,VEC_CLASSID,3)do { if (!x) return PetscError(((MPI_Comm)0x44000001),7334,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),7334,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),7334,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),7334,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
7335 PetscValidHeaderSpecific(r,VEC_CLASSID,4)do { if (!r) return PetscError(((MPI_Comm)0x44000001),7335,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",4); if (!PetscCheckPointer(r,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),7335,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,4); if (((PetscObject)(r))->classid != VEC_CLASSID) { if (
((PetscObject)(r))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),7335,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,4); else return PetscError(((MPI_Comm)0x44000001),7335,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",4); } } while (0)
;
7336 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7336,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7337 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7337,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7338 ierr = PetscLogEventBegin(MAT_Residual,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Residual].active) ? (*PetscLogPLB)((MAT_Residual),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7338,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7339 if (!mat->ops->residual) {
7340 ierr = MatMult(mat,x,r);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7340,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7341 ierr = VecAYPX(r,-1.0,b);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7341,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7342 } else {
7343 ierr = (*mat->ops->residual)(mat,b,x,r);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7343,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7344 }
7345 ierr = PetscLogEventEnd(MAT_Residual,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Residual].active) ? (*PetscLogPLE)((MAT_Residual),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7345,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7346 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7347}
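
/*
   Editor's note: sketch, not part of matrix.c, computing r = b - A*x and its
   2-norm; this matches the default branch above (MatMult() followed by
   VecAYPX()).
*/
static PetscErrorCode ExampleResidualNorm(Mat A,Vec b,Vec x,PetscReal *norm)
{
  Vec            r;
  PetscErrorCode ierr;

  ierr = VecDuplicate(b,&r);CHKERRQ(ierr);
  ierr = MatResidual(A,b,x,r);CHKERRQ(ierr);
  ierr = VecNorm(r,NORM_2,norm);CHKERRQ(ierr);
  ierr = VecDestroy(&r);CHKERRQ(ierr);
  return 0;
}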
7348
7349/*@C
7350 MatGetRowIJ - Returns the compressed row storage i and j indices for sequential matrices.
7351
7352 Collective on Mat
7353
7354 Input Parameters:
7355+ mat - the matrix
7356. shift - 0 or 1 indicating we want the indices starting at 0 or 1
7357. symmetric - PETSC_TRUE or PETSC_FALSE indicating the matrix data structure should be symmetrized
7358- inodecompressed - PETSC_TRUE or PETSC_FALSE indicating if the nonzero structure of the
7359 inodes or the nonzero elements is wanted. For BAIJ matrices the compressed version is
7360 always used.
7361
7362 Output Parameters:
7363+ n - number of rows in the (possibly compressed) matrix
7364. ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
7365. ja - the column indices
7366- done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7367 are responsible for handling the case when done == PETSC_FALSE and ia and ja are not set
7368
7369 Level: developer
7370
7371 Notes:
7372 You CANNOT change any of the ia[] or ja[] values.
7373
7374 Use MatRestoreRowIJ() when you are finished accessing the ia[] and ja[] values.
7375
7376 Fortran Notes:
7377 In Fortran use
7378$
7379$ PetscInt ia(1), ja(1)
7380$ PetscOffset iia, jja
7381$ call MatGetRowIJ(mat,shift,symmetric,inodecompressed,n,ia,iia,ja,jja,done,ierr)
7382$ ! Access the ith and jth entries via ia(iia + i) and ja(jja + j)
7383
7384 or
7385$
7386$ PetscInt, pointer :: ia(:),ja(:)
7387$ call MatGetRowIJF90(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
7388$ ! Access the ith and jth entries via ia(i) and ja(j)
7389
7390.seealso: MatGetColumnIJ(), MatRestoreRowIJ(), MatSeqAIJGetArray()
7391@*/
7392PetscErrorCode MatGetRowIJ(Mat mat,PetscInt shift,PetscBool symmetric,PetscBool inodecompressed,PetscInt *n,const PetscInt *ia[],const PetscInt *ja[],PetscBool *done)
7393{
7394 PetscErrorCode ierr;
7395
7396 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7396; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7397 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7397,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7397,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7397,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7397,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7398 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7398,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7399 PetscValidIntPointer(n,5)do { if (!n) return PetscError(((MPI_Comm)0x44000001),7399,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",5); if (!PetscCheckPointer(n,
PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7399,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to Int: Parameter # %d",5); } while (0)
;
7400 if (ia) PetscValidIntPointer(ia,6)do { if (!ia) return PetscError(((MPI_Comm)0x44000001),7400,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",6); if (!PetscCheckPointer(ia
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7400,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to Int: Parameter # %d",6); } while (0)
;
7401 if (ja) PetscValidIntPointer(ja,7)do { if (!ja) return PetscError(((MPI_Comm)0x44000001),7401,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",7); if (!PetscCheckPointer(ja
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7401,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to Int: Parameter # %d",7); } while (0)
;
7402 PetscValidIntPointer(done,8)do { if (!done) return PetscError(((MPI_Comm)0x44000001),7402
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",8); if
(!PetscCheckPointer(done,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),7402,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,8); } while (0)
;
7403 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7403,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7404 if (!mat->ops->getrowij) *done = PETSC_FALSE;
7405 else {
7406 *done = PETSC_TRUE;
7407 ierr = PetscLogEventBegin(MAT_GetRowIJ,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_GetRowIJ].active) ? (*PetscLogPLB)((MAT_GetRowIJ),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7407,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7408 ierr = (*mat->ops->getrowij)(mat,shift,symmetric,inodecompressed,n,ia,ja,done);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7408,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7409 ierr = PetscLogEventEnd(MAT_GetRowIJ,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_GetRowIJ].active) ? (*PetscLogPLE)((MAT_GetRowIJ),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7409,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7410 }
7411 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7412}
7413
7414/*@C
7415 MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
7416
7417 Collective on Mat
7418
7419 Input Parameters:
7420+ mat - the matrix
7421. shift - 0 or 1 indicating we want the indices starting at 0 or 1
7422. symmetric - PETSC_TRUE or PETSC_FALSE indicating the matrix data structure should be
7423 symmetrized
7424. inodecompressed - PETSC_TRUE or PETSC_FALSE indicating if the nonzero structure of the
7425 inodes or the nonzero elements is wanted. For BAIJ matrices the compressed version is
7426 always used.
7427. n - number of columns in the (possibly compressed) matrix
7428. ia - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that col of the matrix
7429- ja - the row indices
7430
7431 Output Parameter:
7432. done - PETSC_TRUE or PETSC_FALSE, indicating whether the values have been returned
7433
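 Example of Usage:
 A minimal sketch (error checking with CHKERRQ() elided; shift 0 and no symmetrization or inode compression are illustrative choices):
.vb
      PetscInt       n;
      const PetscInt *ia,*ja;
      PetscBool      done;
      MatGetColumnIJ(mat,0,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);
      if (done) { /* ia[] and ja[] now hold the compressed column structure */ }
      MatRestoreColumnIJ(mat,0,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);
.ve
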
7434 Level: developer
7435
7436.seealso: MatGetRowIJ(), MatRestoreColumnIJ()
7437@*/
7438PetscErrorCode MatGetColumnIJ(Mat mat,PetscInt shift,PetscBool symmetric,PetscBool inodecompressed,PetscInt *n,const PetscInt *ia[],const PetscInt *ja[],PetscBool *done)
7439{
7440 PetscErrorCode ierr;
7441
7442 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7442; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7443 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7443,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7443,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7443,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7443,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7444 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7444,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7445 PetscValidIntPointer(n,4)do { if (!n) return PetscError(((MPI_Comm)0x44000001),7445,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",4); if (!PetscCheckPointer(n,
PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7445,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to Int: Parameter # %d",4); } while (0)
;
7446 if (ia) PetscValidIntPointer(ia,5)do { if (!ia) return PetscError(((MPI_Comm)0x44000001),7446,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",5); if (!PetscCheckPointer(ia
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7446,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to Int: Parameter # %d",5); } while (0)
;
7447 if (ja) PetscValidIntPointer(ja,6)do { if (!ja) return PetscError(((MPI_Comm)0x44000001),7447,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",6); if (!PetscCheckPointer(ja
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7447,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to Int: Parameter # %d",6); } while (0)
;
7448 PetscValidIntPointer(done,7)do { if (!done) return PetscError(((MPI_Comm)0x44000001),7448
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",7); if
(!PetscCheckPointer(done,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),7448,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,7); } while (0)
;
7449 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7449,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7450 if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
7451 else {
7452 *done = PETSC_TRUE;
7453 ierr = (*mat->ops->getcolumnij)(mat,shift,symmetric,inodecompressed,n,ia,ja,done);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7453,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7454 }
7455 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7456}
7457
7458/*@C
7459 MatRestoreRowIJ - Call when you are finished using the ia,ja indices obtained with
7460 MatGetRowIJ().
7461
7462 Collective on Mat
7463
7464 Input Parameters:
7465+ mat - the matrix
7466. shift - 0 or 1 indicating we want the indices starting at 0 or 1
7467. symmetric - PETSC_TRUE or PETSC_FALSE indicating the matrix data structure should be
7468 symmetrized
7469. inodecompressed - PETSC_TRUE or PETSC_FALSE indicating if the nonzero structure of the
7470 inodes or the nonzero elements is wanted. For BAIJ matrices the compressed version is
7471 always used.
7472. n - size of (possibly compressed) matrix
7473. ia - the row pointers
7474- ja - the column indices
7475
7476 Output Parameter:
7477. done - PETSC_TRUE or PETSC_FALSE indicating whether the values have been returned
7478
7479 Note:
7480 This routine zeros out n, ia, and ja. This is to prevent accidental
7481 use of the arrays after they have been restored. If you pass NULL, it will
7482 not zero the pointers. Use of ia or ja after MatRestoreRowIJ() is invalid.
7483
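 Example of Usage:
 A minimal sketch of the Get/Restore pairing (error checking elided; the argument values are illustrative):
.vb
      PetscInt       n;
      const PetscInt *ia,*ja;
      PetscBool      done;
      MatGetRowIJ(mat,0,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);
      /* ... use ia[] and ja[] ... */
      MatRestoreRowIJ(mat,0,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);
      /* n, ia, and ja have been zeroed and must not be used here */
.ve
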
7484 Level: developer
7485
7486.seealso: MatGetRowIJ(), MatRestoreColumnIJ()
7487@*/
7488PetscErrorCode MatRestoreRowIJ(Mat mat,PetscInt shift,PetscBool symmetric,PetscBool inodecompressed,PetscInt *n,const PetscInt *ia[],const PetscInt *ja[],PetscBool *done)
7489{
7490 PetscErrorCode ierr;
7491
7492 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7492; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7493 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7493,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7493,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7493,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7493,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7494 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7494,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7495 if (ia) PetscValidIntPointer(ia,6)do { if (!ia) return PetscError(((MPI_Comm)0x44000001),7495,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",6); if (!PetscCheckPointer(ia
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7495,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to Int: Parameter # %d",6); } while (0)
;
7496 if (ja) PetscValidIntPointer(ja,7)do { if (!ja) return PetscError(((MPI_Comm)0x44000001),7496,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",7); if (!PetscCheckPointer(ja
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7496,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to Int: Parameter # %d",7); } while (0)
;
7497 PetscValidIntPointer(done,8)do { if (!done) return PetscError(((MPI_Comm)0x44000001),7497
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",8); if
(!PetscCheckPointer(done,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),7497,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,8); } while (0)
;
7498 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7498,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7499
7500 if (!mat->ops->restorerowij) *done = PETSC_FALSE;
7501 else {
7502 *done = PETSC_TRUE;
7503 ierr = (*mat->ops->restorerowij)(mat,shift,symmetric,inodecompressed,n,ia,ja,done);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7503,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7504 if (n) *n = 0;
7505 if (ia) *ia = NULL((void*)0);
7506 if (ja) *ja = NULL((void*)0);
7507 }
7508 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7509}
7510
7511/*@C
7512 MatRestoreColumnIJ - Call when you are finished using the ia,ja indices obtained with
7513 MatGetColumnIJ().
7514
7515 Collective on Mat
7516
7517 Input Parameters:
7518+ mat - the matrix
7519. shift - 0 or 1 indicating we want the indices starting at 0 or 1
7520. symmetric - PETSC_TRUE or PETSC_FALSE indicating the matrix data structure should be
7521 symmetrized
7522- inodecompressed - PETSC_TRUE or PETSC_FALSE indicating if the nonzero structure of the
7523 inodes or the nonzero elements is wanted. For BAIJ matrices the compressed version is
7524 always used.
7525
7526 Output Parameters:
7527+ n - size of (possibly compressed) matrix
7528. ia - the column pointers
7529. ja - the row indices
7530- done - PETSC_TRUE or PETSC_FALSE indicating whether the values have been returned
7531
7532 Level: developer
7533
7534.seealso: MatGetColumnIJ(), MatRestoreRowIJ()
7535@*/
7536PetscErrorCode MatRestoreColumnIJ(Mat mat,PetscInt shift,PetscBool symmetric,PetscBool inodecompressed,PetscInt *n,const PetscInt *ia[],const PetscInt *ja[],PetscBool *done)
7537{
7538 PetscErrorCode ierr;
7539
7540 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7540; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7541 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7541,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7541,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7541,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7541,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7542 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7542,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7543 if (ia) PetscValidIntPointer(ia,5)do { if (!ia) return PetscError(((MPI_Comm)0x44000001),7543,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",5); if (!PetscCheckPointer(ia
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7543,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to Int: Parameter # %d",5); } while (0)
;
7544 if (ja) PetscValidIntPointer(ja,6)do { if (!ja) return PetscError(((MPI_Comm)0x44000001),7544,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",6); if (!PetscCheckPointer(ja
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7544,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to Int: Parameter # %d",6); } while (0)
;
7545 PetscValidIntPointer(done,7)do { if (!done) return PetscError(((MPI_Comm)0x44000001),7545
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",7); if
(!PetscCheckPointer(done,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),7545,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,7); } while (0)
;
7546 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7546,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7547
7548 if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
7549 else {
7550 *done = PETSC_TRUE;
7551 ierr = (*mat->ops->restorecolumnij)(mat,shift,symmetric,inodecompressed,n,ia,ja,done);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7551,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7552 if (n) *n = 0;
7553 if (ia) *ia = NULL((void*)0);
7554 if (ja) *ja = NULL((void*)0);
7555 }
7556 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7557}
7558
7559/*@C
7560 MatColoringPatch - Used inside matrix coloring routines that
7561 use MatGetRowIJ() and/or MatGetColumnIJ().
7562
7563 Collective on Mat
7564
7565 Input Parameters:
7566+ mat - the matrix
7567. ncolors - max color value
7568. n - number of entries in colorarray
7569- colorarray - array indicating color for each column
7570
7571 Output Parameter:
7572. iscoloring - coloring generated using colorarray information
7573
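 Example of Usage:
 A sketch of the default path; n and ncolors are assumed already known. Because the array is handed to ISColoringCreate() with PETSC_OWN_POINTER, colorarray should be obtained with PetscMalloc1() and is not freed by the caller:
.vb
      ISColoringValue *colorarray;
      ISColoring      iscoloring;
      PetscMalloc1(n,&colorarray);
      /* ... fill colorarray[i] with the color of column i ... */
      MatColoringPatch(mat,ncolors,n,colorarray,&iscoloring);
      /* colorarray is now owned by iscoloring and must not be freed here */
.ve
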
7574 Level: developer
7575
7576.seealso: MatGetRowIJ(), MatGetColumnIJ()
7577
7578@*/
7579PetscErrorCode MatColoringPatch(Mat mat,PetscInt ncolors,PetscInt n,ISColoringValue colorarray[],ISColoring *iscoloring)
7580{
7581 PetscErrorCode ierr;
7582
7583 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7583; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7584 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7584,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7584,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7584,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7584,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7585 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7585,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7586 PetscValidIntPointer(colorarray,4)do { if (!colorarray) return PetscError(((MPI_Comm)0x44000001
),7586,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",4); if
(!PetscCheckPointer(colorarray,PETSC_INT)) return PetscError
(((MPI_Comm)0x44000001),7586,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to Int: Parameter # %d"
,4); } while (0)
;
7587 PetscValidPointer(iscoloring,5)do { if (!iscoloring) return PetscError(((MPI_Comm)0x44000001
),7587,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",5); if
(!PetscCheckPointer(iscoloring,PETSC_CHAR)) return PetscError
(((MPI_Comm)0x44000001),7587,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",5);
} while (0)
;
7588 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7588,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7589
7590 if (!mat->ops->coloringpatch) {
7591 ierr = ISColoringCreate(PetscObjectComm((PetscObject)mat),ncolors,n,colorarray,PETSC_OWN_POINTER,iscoloring);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7591,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7592 } else {
7593 ierr = (*mat->ops->coloringpatch)(mat,ncolors,n,colorarray,iscoloring);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7593,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7594 }
7595 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7596}
7597
7598
7599/*@
7600 MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
7601
7602 Logically Collective on Mat
7603
7604 Input Parameter:
7605. mat - the factored matrix to be reset
7606
7607 Notes:
7608 This routine should be used only with factored matrices formed by in-place
7609 factorization via ILU(0) (or by in-place LU factorization for the MATSEQDENSE
7610 format). This option can save memory, for example, when solving nonlinear
7611 systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
7612 ILU(0) preconditioner.
7613
7614 Note that one can specify in-place ILU(0) factorization by calling
7615.vb
7616 PCSetType(pc,PCILU);
7617 PCFactorSetUseInPlace(pc,PETSC_TRUE);
7618.ve
7619 or by using the options -pc_type ilu -pc_factor_in_place
7620
7621 In-place factorization ILU(0) can also be used as a local
7622 solver for the blocks within the block Jacobi or additive Schwarz
7623 methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
7624 for details on setting local solver options.
7625
7626 Most users should employ the simplified KSP interface for linear solvers
7627 instead of working directly with matrix algebra routines such as this.
7628 See, e.g., KSPCreate().
7629
7630 Level: developer
7631
7632.seealso: PCFactorSetUseInPlace(), PCFactorGetUseInPlace()
7633
7634@*/
7635PetscErrorCode MatSetUnfactored(Mat mat)
7636{
7637 PetscErrorCode ierr;
7638
7639 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7639; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7640 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7640,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7640,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7640,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7640,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7641 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7641,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7642 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7642,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7643 mat->factortype = MAT_FACTOR_NONE;
7644 if (!mat->ops->setunfactored) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7645 ierr = (*mat->ops->setunfactored)(mat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7645,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7646 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7647}
7648
7649/*MC
7650 MatDenseGetArrayF90 - Accesses a matrix array from Fortran90.
7651
7652 Synopsis:
7653 MatDenseGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
7654
7655 Not collective
7656
7657 Input Parameter:
7658. x - matrix
7659
7660 Output Parameters:
7661+ xx_v - the Fortran90 pointer to the array
7662- ierr - error code
7663
7664 Example of Usage:
7665.vb
7666 PetscScalar, pointer :: xx_v(:,:)
7667 ....
7668 call MatDenseGetArrayF90(x,xx_v,ierr)
7669 a = xx_v(3,1)
7670 call MatDenseRestoreArrayF90(x,xx_v,ierr)
7671.ve
7672
7673 Level: advanced
7674
7675.seealso: MatDenseRestoreArrayF90(), MatDenseGetArray(), MatDenseRestoreArray(), MatSeqAIJGetArrayF90()
7676
7677M*/
7678
7679/*MC
7680 MatDenseRestoreArrayF90 - Restores a matrix array that has been
7681 accessed with MatDenseGetArrayF90().
7682
7683 Synopsis:
7684 MatDenseRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
7685
7686 Not collective
7687
7688 Input Parameters:
7689+ x - matrix
7690- xx_v - the Fortran90 pointer to the array
7691
7692 Output Parameter:
7693. ierr - error code
7694
7695 Example of Usage:
7696.vb
7697 PetscScalar, pointer :: xx_v(:,:)
7698 ....
7699 call MatDenseGetArrayF90(x,xx_v,ierr)
7700 a = xx_v(3,1)
7701 call MatDenseRestoreArrayF90(x,xx_v,ierr)
7702.ve
7703
7704 Level: advanced
7705
7706.seealso: MatDenseGetArrayF90(), MatDenseGetArray(), MatDenseRestoreArray(), MatSeqAIJRestoreArrayF90()
7707
7708M*/
7709
7710
7711/*MC
7712 MatSeqAIJGetArrayF90 - Accesses a matrix array from Fortran90.
7713
7714 Synopsis:
7715 MatSeqAIJGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
7716
7717 Not collective
7718
7719 Input Parameter:
7720. x - matrix
7721
7722 Output Parameters:
7723+ xx_v - the Fortran90 pointer to the array
7724- ierr - error code
7725
7726 Example of Usage:
7727.vb
7728 PetscScalar, pointer :: xx_v(:)
7729 ....
7730 call MatSeqAIJGetArrayF90(x,xx_v,ierr)
7731 a = xx_v(3)
7732 call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
7733.ve
7734
7735 Level: advanced
7736
7737.seealso: MatSeqAIJRestoreArrayF90(), MatSeqAIJGetArray(), MatSeqAIJRestoreArray(), MatDenseGetArrayF90()
7738
7739M*/
7740
7741/*MC
7742 MatSeqAIJRestoreArrayF90 - Restores a matrix array that has been
7743 accessed with MatSeqAIJGetArrayF90().
7744
7745 Synopsis:
7746 MatSeqAIJRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
7747
7748 Not collective
7749
7750 Input Parameters:
7751+ x - matrix
7752- xx_v - the Fortran90 pointer to the array
7753
7754 Output Parameter:
7755. ierr - error code
7756
7757 Example of Usage:
7758.vb
7759 PetscScalar, pointer :: xx_v(:)
7760 ....
7761 call MatSeqAIJGetArrayF90(x,xx_v,ierr)
7762 a = xx_v(3)
7763 call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
7764.ve
7765
7766 Level: advanced
7767
7768.seealso: MatSeqAIJGetArrayF90(), MatSeqAIJGetArray(), MatSeqAIJRestoreArray(), MatDenseRestoreArrayF90()
7769
7770M*/
7771
7772
7773/*@
7774 MatCreateSubMatrix - Gets a single submatrix on the same number of processors
7775 as the original matrix.
7776
7777 Collective on Mat
7778
7779 Input Parameters:
7780+ mat - the original matrix
7781. isrow - parallel IS containing the rows this processor should obtain
7782. iscol - parallel IS containing all columns you wish to keep. Each process should list the columns that will be in its "diagonal part" in the new matrix.
7783- cll - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
7784
7785 Output Parameter:
7786. newmat - the new submatrix, of the same type as the old
7787
7788 Level: advanced
7789
7790 Notes:
7791 The submatrix can be multiplied with vectors that use the same layout as iscol.
7792
7793 Some matrix types place restrictions on the row and column indices, such
7794 as that they be sorted or that they be equal to each other.
7795
7796 The index sets may not have duplicate entries.
7797
7798 The first time this is called you should use a cll of MAT_INITIAL_MATRIX,
7799 and the MatCreateSubMatrix() routine will create the newmat for you. Any additional calls
7800 to this routine with a mat of the same nonzero structure and with a cll of MAT_REUSE_MATRIX
7801 will reuse the matrix generated the first time. You should call MatDestroy() on newmat when
7802 you are finished using it.
7803
7804 The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
7805 the input matrix.
7806
7807 If iscol is NULL then all columns are obtained (not supported in Fortran).
7808
7809 Example usage:
7810 Consider the following 8x8 matrix with 34 nonzero values, which is
7811 assembled across 3 processors. Assume that proc0 owns 3 rows,
7812 proc1 owns 3 rows, and proc2 owns 2 rows. This division can be shown
7813 as follows:
7814
7815.vb
7816 1 2 0 | 0 3 0 | 0 4
7817 Proc0 0 5 6 | 7 0 0 | 8 0
7818 9 0 10 | 11 0 0 | 12 0
7819 -------------------------------------
7820 13 0 14 | 15 16 17 | 0 0
7821 Proc1 0 18 0 | 19 20 21 | 0 0
7822 0 0 0 | 22 23 0 | 24 0
7823 -------------------------------------
7824 Proc2 25 26 27 | 0 0 28 | 29 0
7825 30 0 0 | 31 32 33 | 0 34
7826.ve
7827
7828 Suppose isrow = [0 1 | 4 | 6 7] and iscol = [1 2 | 3 4 5 | 6]. The resulting submatrix is
7829
7830.vb
7831 2 0 | 0 3 0 | 0
7832 Proc0 5 6 | 7 0 0 | 8
7833 -------------------------------
7834 Proc1 18 0 | 19 20 21 | 0
7835 -------------------------------
7836 Proc2 26 27 | 0 0 28 | 29
7837 0 0 | 31 32 33 | 0
7838.ve
7839
7840
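 Example of Usage:
 A minimal sketch extracting the locally owned rows and all columns (iscol = NULL); error checking elided:
.vb
      IS       isrow;
      Mat      submat;
      PetscInt rstart,rend;
      MatGetOwnershipRange(mat,&rstart,&rend);
      ISCreateStride(PetscObjectComm((PetscObject)mat),rend-rstart,rstart,1,&isrow);
      MatCreateSubMatrix(mat,isrow,NULL,MAT_INITIAL_MATRIX,&submat);
      /* ... use submat ... */
      MatDestroy(&submat);
      ISDestroy(&isrow);
.ve
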
7841.seealso: MatCreateSubMatrices()
7842@*/
7843PetscErrorCode MatCreateSubMatrix(Mat mat,IS isrow,IS iscol,MatReuse cll,Mat *newmat)
7844{
7845 PetscErrorCode ierr;
7846 PetscMPIInt size;
7847 Mat *local;
7848 IS iscoltmp;
7849
7850 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7850; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7851 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7851,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7851,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7851,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7851,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7852 PetscValidHeaderSpecific(isrow,IS_CLASSID,2)do { if (!isrow) return PetscError(((MPI_Comm)0x44000001),7852
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",2); if (
!PetscCheckPointer(isrow,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7852,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(isrow))->classid != IS_CLASSID) { if
(((PetscObject)(isrow))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),7852,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),7852,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
7853 if (iscol) PetscValidHeaderSpecific(iscol,IS_CLASSID,3)do { if (!iscol) return PetscError(((MPI_Comm)0x44000001),7853
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(iscol,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7853,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(iscol))->classid != IS_CLASSID) { if
(((PetscObject)(iscol))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),7853,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),7853,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
7854 PetscValidPointer(newmat,5)do { if (!newmat) return PetscError(((MPI_Comm)0x44000001),7854
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",5); if
(!PetscCheckPointer(newmat,PETSC_CHAR)) return PetscError(((
MPI_Comm)0x44000001),7854,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",5);
} while (0)
;
7855 if (cll == MAT_REUSE_MATRIX) PetscValidHeaderSpecific(*newmat,MAT_CLASSID,5)do { if (!*newmat) return PetscError(((MPI_Comm)0x44000001),7855
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",5); if (
!PetscCheckPointer(*newmat,PETSC_OBJECT)) return PetscError((
(MPI_Comm)0x44000001),7855,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,5); if (((PetscObject)(*newmat))->classid != MAT_CLASSID)
{ if (((PetscObject)(*newmat))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),7855,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,5); else return PetscError(((MPI_Comm)0x44000001),7855,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",5); } } while (0)
;
7856 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7856,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7857 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),7857,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
7858 if (cll == MAT_IGNORE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Cannot use MAT_IGNORE_MATRIX")return PetscError(PetscObjectComm((PetscObject)mat),7858,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Cannot use MAT_IGNORE_MATRIX")
;
7859
7860 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7860,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7861 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7861,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7862
7863 if (!iscol || isrow == iscol) {
7864 PetscBool stride;
7865 PetscMPIInt grabentirematrix = 0,grab;
7866 ierr = PetscObjectTypeCompare((PetscObject)isrow,ISSTRIDE"stride",&stride);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7866,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7867 if (stride) {
7868 PetscInt first,step,n,rstart,rend;
7869 ierr = ISStrideGetInfo(isrow,&first,&step);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7869,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7870 if (step == 1) {
7871 ierr = MatGetOwnershipRange(mat,&rstart,&rend);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7871,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7872 if (rstart == first) {
7873 ierr = ISGetLocalSize(isrow,&n);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7873,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7874 if (n == rend-rstart) {
7875 grabentirematrix = 1;
7876 }
7877 }
7878 }
7879 }
7880 ierr = MPIU_Allreduce(&grabentirematrix,&grab,1,MPI_INT,MPI_MIN,PetscObjectComm((PetscObject)mat))(PetscAllreduceBarrierCheck(PetscObjectComm((PetscObject)mat)
,1,7880,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)mat))),0) || MPI_Allreduce((&grabentirematrix
),(&grab),(1),(((MPI_Datatype)0x4c000405)),((MPI_Op)(0x58000002
)),(PetscObjectComm((PetscObject)mat)))))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7880,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7881 if (grab) {
7882 ierr = PetscInfo(mat,"Getting entire matrix as submatrix\n")PetscInfo_Private(__func__,mat,"Getting entire matrix as submatrix\n"
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7882,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7883 if (cll == MAT_INITIAL_MATRIX) {
7884 *newmat = mat;
7885 ierr = PetscObjectReference((PetscObject)mat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7885,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7886 }
7887 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7888 }
7889 }
7890
7891 if (!iscol) {
7892 ierr = ISCreateStride(PetscObjectComm((PetscObject)mat),mat->cmap->n,mat->cmap->rstart,1,&iscoltmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7892,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7893 } else {
7894 iscoltmp = iscol;
7895 }
7896
7897 /* if original matrix is on just one processor then use submatrix generated */
7898 if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
7899 ierr = MatCreateSubMatrices(mat,1,&isrow,&iscoltmp,MAT_REUSE_MATRIX,&newmat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7899,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7900 goto setproperties;
7901 } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
7902 ierr = MatCreateSubMatrices(mat,1,&isrow,&iscoltmp,MAT_INITIAL_MATRIX,&local);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7902,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7903 *newmat = *local;
7904 ierr = PetscFree(local)((*PetscTrFree)((void*)(local),7904,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((local) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7904,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7905 goto setproperties;
7906 } else if (!mat->ops->createsubmatrix) {
7907 /* Create a new matrix type that implements the operation using the full matrix */
7908 ierr = PetscLogEventBegin(MAT_CreateSubMat,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_CreateSubMat].active) ? (*PetscLogPLB)((MAT_CreateSubMat
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7908,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7909 switch (cll) {
7910 case MAT_INITIAL_MATRIX:
7911 ierr = MatCreateSubMatrixVirtual(mat,isrow,iscoltmp,newmat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7911,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7912 break;
7913 case MAT_REUSE_MATRIX:
7914 ierr = MatSubMatrixVirtualUpdate(*newmat,mat,isrow,iscoltmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7914,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7915 break;
7916 default: SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX")return PetscError(PetscObjectComm((PetscObject)mat),7916,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",63,PETSC_ERROR_INITIAL
,"Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX"
)
;
7917 }
7918 ierr = PetscLogEventEnd(MAT_CreateSubMat,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_CreateSubMat].active) ? (*PetscLogPLE)((MAT_CreateSubMat
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7918,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7919 goto setproperties;
7920 }
7921
7922 if (!mat->ops->createsubmatrix) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(((MPI_Comm)0x44000001),7922,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,56,PETSC_ERROR_INITIAL,"Mat type %s",((PetscObject)mat)->
type_name)
;
7923 ierr = PetscLogEventBegin(MAT_CreateSubMat,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_CreateSubMat].active) ? (*PetscLogPLB)((MAT_CreateSubMat
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7923,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7924 ierr = (*mat->ops->createsubmatrix)(mat,isrow,iscoltmp,cll,newmat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7924,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7925 ierr = PetscLogEventEnd(MAT_CreateSubMat,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_CreateSubMat].active) ? (*PetscLogPLE)((MAT_CreateSubMat
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7925,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7926
7927 /* Propagate symmetry information for diagonal blocks */
7928setproperties:
7929 if (isrow == iscoltmp) {
7930 if (mat->symmetric_set && mat->symmetric) {
7931 ierr = MatSetOption(*newmat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7931,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7932 }
7933 if (mat->structurally_symmetric_set && mat->structurally_symmetric) {
7934 ierr = MatSetOption(*newmat,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7934,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7935 }
7936 if (mat->hermitian_set && mat->hermitian) {
7937 ierr = MatSetOption(*newmat,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7937,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7938 }
7939 if (mat->spd_set && mat->spd) {
7940 ierr = MatSetOption(*newmat,MAT_SPD,PETSC_TRUE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7940,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7941 }
7942 }
7943
7944 if (!iscol) {ierr = ISDestroy(&iscoltmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7944,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
7945 if (*newmat && cll == MAT_INITIAL_MATRIX) {ierr = PetscObjectStateIncrease((PetscObject)*newmat)(((PetscObject)*newmat)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7945,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
7946 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7947}
7948
7949/*@
7950 MatStashSetInitialSize - sets the sizes of the matrix stash, which is
7951 used during the assembly process to store values that belong to
7952 other processors.
7953
7954 Not Collective
7955
7956 Input Parameters:
7957+ mat - the matrix
7958. size - the initial size of the stash.
7959- bsize - the initial size of the block-stash (if used).
7960
7961 Options Database Keys:
7962+ -matstash_initial_size <size> or <size0,size1,...sizep-1>
7963- -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1>
7964
7965 Level: intermediate
7966
7967 Notes:
7968 The block-stash is used for values set with MatSetValuesBlocked() while
7969 the stash is used for values set with MatSetValues().
7970
7971 Run with the option -info and look for output of the form
7972 MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
7973 to determine the appropriate value, MM, to use for size, and
7974 MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
7975 to determine the value, BMM, to use for bsize.
7976
7977
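 Example of Usage:
 A sketch; the sizes below are illustrative values that would be read off the -info output described above:
.vb
      MatStashSetInitialSize(mat,10000,1000);
.ve
 or, equivalently, at the command line with -matstash_initial_size 10000 -matstash_block_initial_size 1000
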
7978.seealso: MatAssemblyBegin(), MatAssemblyEnd(), Mat, MatStashGetInfo()
7979
7980@*/
7981PetscErrorCode MatStashSetInitialSize(Mat mat,PetscInt size, PetscInt bsize)
7982{
7983 PetscErrorCode ierr;
7984
7985 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7985; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7986 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7986,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7986,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7986,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7986,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7987 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7987,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7988 ierr = MatStashSetInitialSize_Private(&mat->stash,size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7988,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7989 ierr = MatStashSetInitialSize_Private(&mat->bstash,bsize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7989,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7990 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7991}
7992
7993/*@
7994 MatInterpolateAdd - w = y + A*x or A'*x depending on the shape of
7995 the matrix
7996
7997 Neighbor-wise Collective on Mat
7998
7999 Input Parameters:
8000+ mat - the matrix
8001. x,y - the vectors
8002- w - where the result is stored
8003
8004 Level: intermediate
8005
8006 Notes:
8007 w may be the same vector as y.
8008
8009 This allows one to use either the restriction or the interpolation (its transpose)
8010 matrix to do the interpolation.
8011
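 Example of Usage:
 A sketch; P is a hypothetical interpolation matrix with fine rows and coarse columns, and the vectors are sized to match:
.vb
      MatInterpolateAdd(P,xcoarse,yfine,wfine);   /* wfine = yfine + P*xcoarse */
      MatInterpolateAdd(P,xfine,ycoarse,wcoarse); /* wcoarse = ycoarse + P'*xfine; the vector sizes select the transpose */
.ve
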
8012.seealso: MatMultAdd(), MatMultTransposeAdd(), MatRestrict()
8013
8014@*/
8015PetscErrorCode MatInterpolateAdd(Mat A,Vec x,Vec y,Vec w)
8016{
8017 PetscErrorCode ierr;
8018 PetscInt M,N,Ny;
8019
8020  PetscFunctionBegin;
8021  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8022  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
8023  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
8024  PetscValidHeaderSpecific(w,VEC_CLASSID,4);
8025  PetscValidType(A,1);
8026  MatCheckPreallocated(A,1);
8027  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
8028  ierr = VecGetSize(y,&Ny);CHKERRQ(ierr);
8029  if (M == Ny) {
8030    ierr = MatMultAdd(A,x,y,w);CHKERRQ(ierr);
8031  } else {
8032    ierr = MatMultTransposeAdd(A,x,y,w);CHKERRQ(ierr);
8033  }
8034  PetscFunctionReturn(0);
8035}
8036
8037/*@
8038 MatInterpolate - y = A*x or A'*x depending on the shape of
8039 the matrix
8040
8041 Neighbor-wise Collective on Mat
8042
8043 Input Parameters:
8044+ A - the matrix
8045- x,y - the vectors
8046
8047 Level: intermediate
8048
8049 Notes:
8050 This allows one to use either the restriction or interpolation (its transpose)
8051 matrix to do the interpolation
8052
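   Example of Usage (an illustrative sketch, assuming P is an interpolation matrix, xc a coarse-grid
   vector, and yf a fine-grid vector; these names are not from the surrounding source):
.vb
   MatInterpolate(P,xc,yf);  /* yf = P*xc, since the global row size of P matches the size of yf */
.ve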
8053.seealso: MatMultAdd(), MatMultTransposeAdd(), MatRestrict()
8054
8055@*/
8056PetscErrorCode MatInterpolate(Mat A,Vec x,Vec y)
8057{
8058 PetscErrorCode ierr;
8059 PetscInt M,N,Ny;
8060
8061  PetscFunctionBegin;
8062  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8063  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
8064  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
8065  PetscValidType(A,1);
8066  MatCheckPreallocated(A,1);
8067  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
8068  ierr = VecGetSize(y,&Ny);CHKERRQ(ierr);
8069  if (M == Ny) {
8070    ierr = MatMult(A,x,y);CHKERRQ(ierr);
8071  } else {
8072    ierr = MatMultTranspose(A,x,y);CHKERRQ(ierr);
8073  }
8074  PetscFunctionReturn(0);
8075}
8076
8077/*@
8078 MatRestrict - y = A*x or A'*x
8079
8080 Neighbor-wise Collective on Mat
8081
8082 Input Parameters:
8083+ A - the matrix
8084- x,y - the vectors
8085
8086 Level: intermediate
8087
8088 Notes:
8089 This allows one to use either the restriction or interpolation (its transpose)
8090 matrix to do the restriction
8091
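   Example of Usage (an illustrative sketch, assuming R is a restriction matrix, P an interpolation
   matrix, xf a fine-grid vector, and yc a coarse-grid vector; these names are not from the surrounding source):
.vb
   MatRestrict(R,xf,yc);  /* yc = R*xf,  since the row size of R matches the size of yc      */
   MatRestrict(P,xf,yc);  /* yc = P'*xf, since the row size of P does not match that of yc   */
.ve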
8092.seealso: MatMultAdd(), MatMultTransposeAdd(), MatInterpolate()
8093
8094@*/
8095PetscErrorCode MatRestrict(Mat A,Vec x,Vec y)
8096{
8097 PetscErrorCode ierr;
8098 PetscInt M,N,Ny;
8099
8100  PetscFunctionBegin;
8101  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8102  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
8103  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
8104  PetscValidType(A,1);
8105  MatCheckPreallocated(A,1);
8106
8107  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
8108  ierr = VecGetSize(y,&Ny);CHKERRQ(ierr);
8109  if (M == Ny) {
8110    ierr = MatMult(A,x,y);CHKERRQ(ierr);
8111  } else {
8112    ierr = MatMultTranspose(A,x,y);CHKERRQ(ierr);
8113  }
8114  PetscFunctionReturn(0);
8115}
8116
8117/*@
8118 MatGetNullSpace - retrieves the null space of a matrix.
8119
8120 Logically Collective on Mat
8121
8122 Input Parameter:
8123. mat - the matrix
8124 Output Parameter: nullsp - the null space object, NULL if none has been set
8125
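   Example of Usage (an illustrative sketch, assuming A is an assembled Mat; the name A is not from the
   surrounding source):
.vb
   MatNullSpace nullsp;
   MatGetNullSpace(A,&nullsp);
   if (nullsp) { /* a null space was previously attached with MatSetNullSpace() */ }
.ve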
8126 Level: developer
8127
8128.seealso: MatCreate(), MatNullSpaceCreate(), MatSetNearNullSpace(), MatSetNullSpace()
8129@*/
8130PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
8131{
8132  PetscFunctionBegin;
8133  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8134  PetscValidPointer(nullsp,2);
8135  *nullsp = (mat->symmetric_set && mat->symmetric && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
8136  PetscFunctionReturn(0);
8137}
8138
8139/*@
8140 MatSetNullSpace - attaches a null space to a matrix.
8141
8142 Logically Collective on Mat
8143
8144 Input Parameters:
8145+ mat - the matrix
8146- nullsp - the null space object
8147
8148 Level: advanced
8149
8150 Notes:
8151 This null space is used by the linear solvers. Overwrites any previous null space that may have been attached
8152
8153 For inconsistent singular systems (linear systems where the right hand side is not in the range of the operator) you also likely should
8154 call MatSetTransposeNullSpace(). This allows the linear system to be solved in a least squares sense.
8155
8156 You can remove the null space by calling this routine with a nullsp of NULL
8157
8158
8159 The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
8160 the domain R^n of a matrix A (mapping R^n to R^m; m rows, n columns) is the direct sum of the null space of A, n(A),
8161 and the range of A^T, R(A^T); similarly, R^m is the direct sum of n(A^T) and R(A). Hence the linear system A x = b has a solution only if b is in R(A)
8162 (equivalently, b is orthogonal to n(A^T)), and if x is a solution then x + alpha n(A) is a solution for any alpha. The minimum norm solution is orthogonal to n(A).
8163 For problems without a solution, the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving A x = \hat{b}, where \hat{b} is b orthogonalized against n(A^T).
8164
8165 Krylov solvers can produce the minimal norm solution to the least squares problem by utilizing MatNullSpaceRemove().
8166
8167 If the matrix is known to be symmetric, because it is an SBAIJ matrix or because one has called MatSetOption(mat,MAT_SYMMETRIC or MAT_SYMMETRIC_ETERNAL,PETSC_TRUE), this
8168 routine also automatically calls MatSetTransposeNullSpace().
8169
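   Example of Usage (an illustrative sketch, assuming A is an assembled Mat whose null space is the
   constant vector, e.g. a pure Neumann Laplacian; the name A is not from the surrounding source):
.vb
   MatNullSpace nullsp;
   MatNullSpaceCreate(PetscObjectComm((PetscObject)A),PETSC_TRUE,0,NULL,&nullsp);
   MatSetNullSpace(A,nullsp);
   MatNullSpaceDestroy(&nullsp);  /* the matrix keeps its own reference */
.ve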
8170.seealso: MatCreate(), MatNullSpaceCreate(), MatSetNearNullSpace(), MatGetNullSpace(), MatSetTransposeNullSpace(), MatGetTransposeNullSpace(), MatNullSpaceRemove()
8171@*/
8172PetscErrorCode MatSetNullSpace(Mat mat,MatNullSpace nullsp)
8173{
8174 PetscErrorCode ierr;
8175
8176  PetscFunctionBegin;
8177  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8178  if (nullsp) PetscValidHeaderSpecific(nullsp,MAT_NULLSPACE_CLASSID,2);
8179  if (nullsp) {ierr = PetscObjectReference((PetscObject)nullsp);CHKERRQ(ierr);}
8180  ierr = MatNullSpaceDestroy(&mat->nullsp);CHKERRQ(ierr);
8181  mat->nullsp = nullsp;
8182  if (mat->symmetric_set && mat->symmetric) {
8183    ierr = MatSetTransposeNullSpace(mat,nullsp);CHKERRQ(ierr);
8184  }
8185  PetscFunctionReturn(0);
8186}
8187
8188/*@
8189 MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
8190
8191 Logically Collective on Mat
8192
8193 Input Parameter:
8194. mat - the matrix
8195 Output Parameter: nullsp - the null space object of the transpose, NULL if none has been set
8196
8197 Level: developer
8198
8199.seealso: MatCreate(), MatNullSpaceCreate(), MatSetNearNullSpace(), MatSetTransposeNullSpace(), MatSetNullSpace(), MatGetNullSpace()
8200@*/
8201PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
8202{
8203  PetscFunctionBegin;
8204  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8205  PetscValidType(mat,1);
8206  PetscValidPointer(nullsp,2);
8207  *nullsp = (mat->symmetric_set && mat->symmetric && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
8208  PetscFunctionReturn(0);
8209}
8210
8211/*@
8212 MatSetTransposeNullSpace - attaches the null space of the transpose of a matrix to the matrix.
8213
8214 Logically Collective on Mat
8215
8216 Input Parameters:
8217+ mat - the matrix
8218- nullsp - the null space object
8219
8220 Level: advanced
8221
8222 Notes:
8223 For inconsistent singular systems (linear systems where the right hand side is not in the range of the operator) this allows the linear system to be solved in a least squares sense.
8224 You must also call MatSetNullSpace()
8225
8226
8227 The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
8228 the domain R^n of a matrix A (mapping R^n to R^m; m rows, n columns) is the direct sum of the null space of A, n(A),
8229 and the range of A^T, R(A^T); similarly, R^m is the direct sum of n(A^T) and R(A). Hence the linear system A x = b has a solution only if b is in R(A)
8230 (equivalently, b is orthogonal to n(A^T)), and if x is a solution then x + alpha n(A) is a solution for any alpha. The minimum norm solution is orthogonal to n(A).
8231 For problems without a solution, the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving A x = \hat{b}, where \hat{b} is b orthogonalized against n(A^T).
8232
8233 Krylov solvers can produce the minimal norm solution to the least squares problem by utilizing MatNullSpaceRemove().
8234
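   Example of Usage (an illustrative sketch, assuming A is a singular nonsymmetric Mat and nvec, tvec are
   normalized vectors spanning n(A) and n(A'); these names are not from the surrounding source):
.vb
   MatNullSpace nullsp,transnullsp;
   MatNullSpaceCreate(PetscObjectComm((PetscObject)A),PETSC_FALSE,1,&nvec,&nullsp);
   MatNullSpaceCreate(PetscObjectComm((PetscObject)A),PETSC_FALSE,1,&tvec,&transnullsp);
   MatSetNullSpace(A,nullsp);
   MatSetTransposeNullSpace(A,transnullsp);
   MatNullSpaceDestroy(&nullsp);
   MatNullSpaceDestroy(&transnullsp);
.ve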
8235.seealso: MatCreate(), MatNullSpaceCreate(), MatSetNearNullSpace(), MatGetNullSpace(), MatSetNullSpace(), MatGetTransposeNullSpace(), MatNullSpaceRemove()
8236@*/
8237PetscErrorCode MatSetTransposeNullSpace(Mat mat,MatNullSpace nullsp)
8238{
8239 PetscErrorCode ierr;
8240
8241  PetscFunctionBegin;
8242  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8243  if (nullsp) PetscValidHeaderSpecific(nullsp,MAT_NULLSPACE_CLASSID,2);
8244  if (nullsp) {ierr = PetscObjectReference((PetscObject)nullsp);CHKERRQ(ierr);}
8245  ierr = MatNullSpaceDestroy(&mat->transnullsp);CHKERRQ(ierr);
8246  mat->transnullsp = nullsp;
8247  PetscFunctionReturn(0);
8248}
8249
8250/*@
8251 MatSetNearNullSpace - attaches a "near" null space to a matrix, often the null space (rigid body modes) of the operator without boundary conditions.
8252 This null space will be used to provide near-null-space vectors to a multigrid preconditioner built from this matrix.
8253
8254 Logically Collective on Mat
8255
8256 Input Parameters:
8257+ mat - the matrix
8258- nullsp - the null space object
8259
8260 Level: advanced
8261
8262 Notes:
8263 Overwrites any previous near null space that may have been attached
8264
8265 You can remove the null space by calling this routine with a nullsp of NULL
8266
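   Example of Usage (an illustrative sketch, assuming A is an assembled elasticity operator and coords is
   a Vec holding the nodal coordinates; these names are not from the surrounding source):
.vb
   MatNullSpace nearnullsp;
   MatNullSpaceCreateRigidBody(coords,&nearnullsp);
   MatSetNearNullSpace(A,nearnullsp);
   MatNullSpaceDestroy(&nearnullsp);  /* the matrix keeps its own reference */
.ve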
8267.seealso: MatCreate(), MatNullSpaceCreate(), MatSetNullSpace(), MatNullSpaceCreateRigidBody(), MatGetNearNullSpace()
8268@*/
8269PetscErrorCode MatSetNearNullSpace(Mat mat,MatNullSpace nullsp)
8270{
8271 PetscErrorCode ierr;
8272
8273  PetscFunctionBegin;
8274  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8275  PetscValidType(mat,1);
8276  if (nullsp) PetscValidHeaderSpecific(nullsp,MAT_NULLSPACE_CLASSID,2);
8277  MatCheckPreallocated(mat,1);
8278  if (nullsp) {ierr = PetscObjectReference((PetscObject)nullsp);CHKERRQ(ierr);}
8279  ierr = MatNullSpaceDestroy(&mat->nearnullsp);CHKERRQ(ierr);
8280  mat->nearnullsp = nullsp;
8281  PetscFunctionReturn(0);
8282}
8283
8284/*@
8285 MatGetNearNullSpace - Gets the near null space attached with MatSetNearNullSpace()
8286
8287 Not Collective
8288
8289 Input Parameter:
8290. mat - the matrix
8291
8292 Output Parameter:
8293. nullsp - the null space object, NULL if not set
8294
8295 Level: developer
8296
8297.seealso: MatSetNearNullSpace(), MatGetNullSpace(), MatNullSpaceCreate()
8298@*/
8299PetscErrorCode MatGetNearNullSpace(Mat mat,MatNullSpace *nullsp)
8300{
8301  PetscFunctionBegin;
8302  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8303  PetscValidType(mat,1);
8304  PetscValidPointer(nullsp,2);
8305  MatCheckPreallocated(mat,1);
8306  *nullsp = mat->nearnullsp;
8307  PetscFunctionReturn(0);
8308}
8309
8310/*@C
8311 MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
8312
8313 Collective on Mat
8314
8315 Input Parameters:
8316+ mat - the matrix
8317. row - row/column permutation
8318- info - options for factorization, including the expected fill factor (>= 1.0)
8319        and the level of fill for ICC(k)
8320
8321 Notes:
8322 The factorization is truly in-place only when the level of fill is zero; otherwise new space is allocated
8323 to store the factored matrix and the previous memory is freed.
8324
8325 Most users should employ the simplified KSP interface for linear solvers
8326 instead of working directly with matrix algebra routines such as this.
8327 See, e.g., KSPCreate().
8328
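   Example of Usage (an illustrative sketch, assuming mat is an assembled symmetric sequential matrix of a
   type that supports ICC; the ordering and info settings here are chosen only for illustration):
.vb
   MatFactorInfo info;
   IS            rperm,cperm;
   MatFactorInfoInitialize(&info);
   info.fill   = 1.0;
   info.levels = 0;   /* ICC(0): the only case that is truly in-place */
   MatGetOrdering(mat,MATORDERINGNATURAL,&rperm,&cperm);
   MatICCFactor(mat,rperm,&info);
   ISDestroy(&rperm);
   ISDestroy(&cperm);
.ve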
8329 Level: developer
8330
8331
8332.seealso: MatICCFactorSymbolic(), MatLUFactorNumeric(), MatCholeskyFactor()
8333
8334 Developer Note: the Fortran interface is not autogenerated as the f90
8335 interface definition cannot be generated correctly [due to MatFactorInfo]
8336
8337@*/
8338PetscErrorCode MatICCFactor(Mat mat,IS row,const MatFactorInfo *info)
8339{
8340 PetscErrorCode ierr;
8341
8342  PetscFunctionBegin;
8343  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8344  PetscValidType(mat,1);
8345  if (row) PetscValidHeaderSpecific(row,IS_CLASSID,2);
8346  PetscValidPointer(info,3);
8347  if (mat->rmap->N != mat->cmap->N) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONG,"matrix must be square");
8348  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
8349  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
8350  if (!mat->ops->iccfactor) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
8351  MatCheckPreallocated(mat,1);
8352  ierr = (*mat->ops->iccfactor)(mat,row,info);CHKERRQ(ierr);
8353  ierr = PetscObjectStateIncrease((PetscObject)mat);CHKERRQ(ierr);
8354  PetscFunctionReturn(0);
8355}
8356
8357/*@
8358 MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
8359 ghosted ones.
8360
8361 Not Collective
8362
8363 Input Parameters:
8364+ mat - the matrix
8365- diag - the diagonal values, including ghost ones
8366
8367 Level: developer
8368
8369 Notes:
8370 Works only for MPIAIJ and MPIBAIJ matrices
8371
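   Example of Usage (an illustrative sketch, assuming mat is an MPIAIJ matrix and lscale is a vector
   holding one scale value for every local column, including the ghost columns; these names are not from
   the surrounding source):
.vb
   MatDiagonalScaleLocal(mat,lscale);
.ve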
8372.seealso: MatDiagonalScale()
8373@*/
8374PetscErrorCode MatDiagonalScaleLocal(Mat mat,Vec diag)
8375{
8376 PetscErrorCode ierr;
8377 PetscMPIInt size;
8378
8379  PetscFunctionBegin;
8380  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8381  PetscValidHeaderSpecific(diag,VEC_CLASSID,2);
8382  PetscValidType(mat,1);
8383
8384  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be already assembled");
8385  ierr = PetscLogEventBegin(MAT_Scale,mat,0,0,0);CHKERRQ(ierr);
8386  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
8387  if (size == 1) {
8388    PetscInt n,m;
8389    ierr = VecGetSize(diag,&n);CHKERRQ(ierr);
8390    ierr = MatGetSize(mat,0,&m);CHKERRQ(ierr);
8391    if (m == n) {
8392      ierr = MatDiagonalScale(mat,0,diag);CHKERRQ(ierr);
8393    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only supported for sequential matrices when no ghost points/periodic conditions");
8394  } else {
8395    ierr = PetscUseMethod(mat,"MatDiagonalScaleLocal_C",(Mat,Vec),(mat,diag));CHKERRQ(ierr);
8396  }
8397  ierr = PetscLogEventEnd(MAT_Scale,mat,0,0,0);CHKERRQ(ierr);
8398  ierr = PetscObjectStateIncrease((PetscObject)mat);CHKERRQ(ierr);
8399  PetscFunctionReturn(0);
8400}
8401
8402/*@
8403 MatGetInertia - Gets the inertia from a factored matrix
8404
8405 Collective on Mat
8406
8407 Input Parameter:
8408. mat - the matrix
8409
8410 Output Parameters:
8411+ nneg - number of negative eigenvalues
8412. nzero - number of zero eigenvalues
8413- npos - number of positive eigenvalues
8414
8415 Level: advanced
8416
8417 Notes:
8418 Matrix must have been factored by MatCholeskyFactor()
8419
8420
8421@*/
8422PetscErrorCode MatGetInertia(Mat mat,PetscInt *nneg,PetscInt *nzero,PetscInt *npos)
8423{
8424 PetscErrorCode ierr;
8425
8426  PetscFunctionBegin;
8427  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8428  PetscValidType(mat,1);
8429  if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix");
8430  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Numeric factor mat is not assembled");
8431  if (!mat->ops->getinertia) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
8432  ierr = (*mat->ops->getinertia)(mat,nneg,nzero,npos);CHKERRQ(ierr);
8433  PetscFunctionReturn(0);
8434}
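/*
   Example of Usage (a minimal illustrative sketch; F is assumed to be a matrix
   already factored with MatCholeskyFactor(), as the Notes above require):
.vb
     PetscInt nneg,nzero,npos;
     ierr = MatGetInertia(F,&nneg,&nzero,&npos);CHKERRQ(ierr);
     ierr = PetscPrintf(PETSC_COMM_WORLD,"inertia: %D negative, %D zero, %D positive\n",nneg,nzero,npos);CHKERRQ(ierr);
.ve
*/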
8435
8436/* ----------------------------------------------------------------*/
8437/*@C
8438 MatSolves - Solves A x = b, given a factored matrix, for a collection of vectors
8439
8440 Neighbor-wise Collective on Mat
8441
8442 Input Parameters:
8443+ mat - the factored matrix
8444- b - the right-hand-side vectors
8445
8446 Output Parameter:
8447. x - the result vectors
8448
8449 Notes:
8450 The vectors b and x cannot be the same, i.e., one cannot
8451 call MatSolves(A,x,x).
8452
8454 Most users should employ the simplified KSP interface for linear solvers
8455 instead of working directly with matrix algebra routines such as this.
8456 See, e.g., KSPCreate().
8457
8458 Level: developer
8459
8460.seealso: MatSolveAdd(), MatSolveTranspose(), MatSolveTransposeAdd(), MatSolve()
8461@*/
8462PetscErrorCode MatSolves(Mat mat,Vecs b,Vecs x)
8463{
8464 PetscErrorCode ierr;
8465
8466  PetscFunctionBegin;
8467  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8468  PetscValidType(mat,1);
8469  if (x == b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors");
8470  if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix");
8471  if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0);
8472
8473  if (!mat->ops->solves) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
8474  MatCheckPreallocated(mat,1);
8475  ierr = PetscLogEventBegin(MAT_Solves,mat,0,0,0);CHKERRQ(ierr);
8476  ierr = (*mat->ops->solves)(mat,b,x);CHKERRQ(ierr);
8477  ierr = PetscLogEventEnd(MAT_Solves,mat,0,0,0);CHKERRQ(ierr);
8478  PetscFunctionReturn(0);
8479}
8480
8481/*@
8482 MatIsSymmetric - Test whether a matrix is symmetric
8483
8484 Collective on Mat
8485
8486 Input Parameters:
8487+ A - the matrix to test
8488- tol - difference between a value and its transpose below which they are considered equal (use 0.0 to require an exact transpose)
8489
8490 Output Parameter:
8491. flg - the result
8492
8493 Notes:
8494 For real numbers MatIsSymmetric() and MatIsHermitian() return identical results
8495
8496 Level: intermediate
8497
8498.seealso: MatTranspose(), MatIsTranspose(), MatIsHermitian(), MatIsStructurallySymmetric(), MatSetOption(), MatIsSymmetricKnown()
8499@*/
8500PetscErrorCode MatIsSymmetric(Mat A,PetscReal tol,PetscBool *flg)
8501{
8502 PetscErrorCode ierr;
8503
8504  PetscFunctionBegin;
8505  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8506  PetscValidPointer(flg,2);
8507
8508  if (!A->symmetric_set) {
8509    if (!A->ops->issymmetric) {
8510      MatType mattype;
8511      ierr = MatGetType(A,&mattype);CHKERRQ(ierr);
8512      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix of type <%s> does not support checking for symmetric",mattype);
8513    }
8514    ierr = (*A->ops->issymmetric)(A,tol,flg);CHKERRQ(ierr);
8515    if (!tol) {
8516      A->symmetric_set = PETSC_TRUE;
8517      A->symmetric = *flg;
8518      if (A->symmetric) {
8519        A->structurally_symmetric_set = PETSC_TRUE;
8520        A->structurally_symmetric = PETSC_TRUE;
8521      }
8522    }
8523  } else if (A->symmetric) {
8524    *flg = PETSC_TRUE;
8525  } else if (!tol) {
8526    *flg = PETSC_FALSE;
8527  } else {
8528    if (!A->ops->issymmetric) {
8529      MatType mattype;
8530      ierr = MatGetType(A,&mattype);CHKERRQ(ierr);
8531      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix of type <%s> does not support checking for symmetric",mattype);
8532    }
8533    ierr = (*A->ops->issymmetric)(A,tol,flg);CHKERRQ(ierr);
8534  }
8535  PetscFunctionReturn(0);
8536}
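/*
   Example of Usage (a minimal sketch; A is assumed to be an assembled matrix of a
   type that implements the symmetry test, e.g. MATSEQAIJ):
.vb
     PetscBool symm;
     ierr = MatIsSymmetric(A,1.e-10,&symm);CHKERRQ(ierr);
     if (symm) {
       ierr = MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
     }
.ve
*/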
8537
8538/*@
8539 MatIsHermitian - Test whether a matrix is Hermitian
8540
8541 Collective on Mat
8542
8543 Input Parameters:
8544+ A - the matrix to test
8545- tol - difference between a value and its conjugate transpose below which they are considered equal (use 0.0 to require an exactly Hermitian matrix)
8546
8547 Output Parameter:
8548. flg - the result
8549
8550 Level: intermediate
8551
8552.seealso: MatTranspose(), MatIsTranspose(), MatIsHermitian(), MatIsStructurallySymmetric(), MatSetOption(),
8553 MatIsSymmetricKnown(), MatIsSymmetric()
8554@*/
8555PetscErrorCode MatIsHermitian(Mat A,PetscReal tol,PetscBool *flg)
8556{
8557 PetscErrorCode ierr;
8558
8559  PetscFunctionBegin;
8560  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8561  PetscValidPointer(flg,2);
8562
8563  if (!A->hermitian_set) {
8564    if (!A->ops->ishermitian) {
8565      MatType mattype;
8566      ierr = MatGetType(A,&mattype);CHKERRQ(ierr);
8567      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix of type <%s> does not support checking for hermitian",mattype);
8568    }
8569    ierr = (*A->ops->ishermitian)(A,tol,flg);CHKERRQ(ierr);
8570    if (!tol) {
8571      A->hermitian_set = PETSC_TRUE;
8572      A->hermitian = *flg;
8573      if (A->hermitian) {
8574        A->structurally_symmetric_set = PETSC_TRUE;
8575        A->structurally_symmetric = PETSC_TRUE;
8576      }
8577    }
8578  } else if (A->hermitian) {
8579    *flg = PETSC_TRUE;
8580  } else if (!tol) {
8581    *flg = PETSC_FALSE;
8582  } else {
8583    if (!A->ops->ishermitian) {
8584      MatType mattype;
8585      ierr = MatGetType(A,&mattype);CHKERRQ(ierr);
8586      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix of type <%s> does not support checking for hermitian",mattype);
8587    }
8588    ierr = (*A->ops->ishermitian)(A,tol,flg);CHKERRQ(ierr);
8589  }
8590  PetscFunctionReturn(0);
8591}
8592
8593/*@
8594 MatIsSymmetricKnown - Checks the flag on the matrix to see if it is symmetric.
8595
8596 Not Collective
8597
8598 Input Parameter:
8599. A - the matrix to check
8600
8601 Output Parameters:
8602+ set - if the symmetric flag is set (this tells you if the next flag is valid)
8603- flg - the result
8604
8605 Level: advanced
8606
8607 Note: Does not check the matrix values directly, so this may return unknown (set = PETSC_FALSE). Use MatIsSymmetric()
8608 if you want it explicitly checked.
8609
8610.seealso: MatTranspose(), MatIsTranspose(), MatIsHermitian(), MatIsStructurallySymmetric(), MatSetOption(), MatIsSymmetric()
8611@*/
8612PetscErrorCode MatIsSymmetricKnown(Mat A,PetscBool *set,PetscBool *flg)
8613{
8614  PetscFunctionBegin;
8615  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8616  PetscValidPointer(set,2);
8617  PetscValidPointer(flg,3);
8618  if (A->symmetric_set) {
8619    *set = PETSC_TRUE;
8620    *flg = A->symmetric;
8621  } else {
8622    *set = PETSC_FALSE;
8623  }
8624  PetscFunctionReturn(0);
8625}
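/*
   Example of Usage (a minimal sketch of the set/flg protocol; A is any assembled Mat,
   and the explicit fallback assumes its type implements the symmetry test):
.vb
     PetscBool set,symm;
     ierr = MatIsSymmetricKnown(A,&set,&symm);CHKERRQ(ierr);
     if (!set) { /* flag not cached, test the values explicitly */
       ierr = MatIsSymmetric(A,0.0,&symm);CHKERRQ(ierr);
     }
.ve
*/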
8626
8627/*@
8628 MatIsHermitianKnown - Checks the flag on the matrix to see if it is Hermitian.
8629
8630 Not Collective
8631
8632 Input Parameter:
8633. A - the matrix to check
8634
8635 Output Parameters:
8636+ set - if the Hermitian flag is set (this tells you if the next flag is valid)
8637- flg - the result
8638
8639 Level: advanced
8640
8641 Note: Does not check the matrix values directly, so this may return unknown (set = PETSC_FALSE). Use MatIsHermitian()
8642 if you want it explicitly checked.
8643
8644.seealso: MatTranspose(), MatIsTranspose(), MatIsHermitian(), MatIsStructurallySymmetric(), MatSetOption(), MatIsSymmetric()
8645@*/
8646PetscErrorCode MatIsHermitianKnown(Mat A,PetscBool *set,PetscBool *flg)
8647{
8648  PetscFunctionBegin;
8649  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8650  PetscValidPointer(set,2);
8651  PetscValidPointer(flg,3);
8652  if (A->hermitian_set) {
8653    *set = PETSC_TRUE;
8654    *flg = A->hermitian;
8655  } else {
8656    *set = PETSC_FALSE;
8657  }
8658  PetscFunctionReturn(0);
8659}
8660
8661/*@
8662 MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
8663
8664 Collective on Mat
8665
8666 Input Parameter:
8667. A - the matrix to test
8668
8669 Output Parameter:
8670. flg - the result
8671
8672 Level: intermediate
8673
8674.seealso: MatTranspose(), MatIsTranspose(), MatIsHermitian(), MatIsSymmetric(), MatSetOption()
8675@*/
8676PetscErrorCode MatIsStructurallySymmetric(Mat A,PetscBool *flg)
8677{
8678 PetscErrorCode ierr;
8679
8680  PetscFunctionBegin;
8681  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8682  PetscValidPointer(flg,2);
8683  if (!A->structurally_symmetric_set) {
8684    if (!A->ops->isstructurallysymmetric) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Matrix does not support checking for structural symmetric");
8685    ierr = (*A->ops->isstructurallysymmetric)(A,&A->structurally_symmetric);CHKERRQ(ierr);
8686
8687    A->structurally_symmetric_set = PETSC_TRUE;
8688  }
8689  *flg = A->structurally_symmetric;
8690  PetscFunctionReturn(0);
8691}
8692
8693/*@
8694 MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
8695 to be communicated to other processors during the MatAssemblyBegin/End() process
8696
8697 Not Collective
8698
8699 Input Parameter:
8700. mat - the matrix
8701
8702 Output Parameters:
8703+ nstash - the size of the stash
8704. reallocs - the number of additional mallocs incurred.
8705. bnstash - the size of the block stash
8706- breallocs - the number of additional mallocs incurred in the block stash
8707
8708 Level: advanced
8709
8710.seealso: MatAssemblyBegin(), MatAssemblyEnd(), Mat, MatStashSetInitialSize()
8711
8712@*/
8713PetscErrorCode MatStashGetInfo(Mat mat,PetscInt *nstash,PetscInt *reallocs,PetscInt *bnstash,PetscInt *breallocs)
8714{
8715 PetscErrorCode ierr;
8716
8717  PetscFunctionBegin;
8718  ierr = MatStashGetInfo_Private(&mat->stash,nstash,reallocs);CHKERRQ(ierr);
8719  ierr = MatStashGetInfo_Private(&mat->bstash,bnstash,breallocs);CHKERRQ(ierr);
8720  PetscFunctionReturn(0);
8721}
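/*
   Example of Usage (a minimal sketch; A is a matrix whose off-process entries were
   just communicated by the assembly calls, so the stash counts refer to that assembly):
.vb
     PetscInt nstash,reallocs,bnstash,breallocs;
     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatStashGetInfo(A,&nstash,&reallocs,&bnstash,&breallocs);CHKERRQ(ierr);
.ve
*/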
8722
8723/*@C
8724 MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
8725 parallel layout
8726
8727 Collective on Mat
8728
8729 Input Parameter:
8730. mat - the matrix
8731
8732 Output Parameters:
8733+ right - (optional) vector that the matrix can be multiplied against
8734- left - (optional) vector that the matrix vector product can be stored in
8735
8736 Notes:
8737 The blocksize of the returned vectors is determined by the row and column block sizes set with MatSetBlockSizes() or the single blocksize (same for both) set by MatSetBlockSize().
8738
8740 These are new vectors which are not owned by the Mat; they should be destroyed with VecDestroy() when no longer needed
8741
8742 Level: advanced
8743
8744.seealso: MatCreate(), VecDestroy()
8745@*/
8746PetscErrorCode MatCreateVecs(Mat mat,Vec *right,Vec *left)
8747{
8748 PetscErrorCode ierr;
8749
8750  PetscFunctionBegin;
8751  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8752  PetscValidType(mat,1);
8753  if (mat->ops->getvecs) {
8754    ierr = (*mat->ops->getvecs)(mat,right,left);CHKERRQ(ierr);
8755  } else {
8756    PetscInt rbs,cbs;
8757    ierr = MatGetBlockSizes(mat,&rbs,&cbs);CHKERRQ(ierr);
8758    if (right) {
8759      if (mat->cmap->n < 0) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"PetscLayout for columns not yet setup");
8760      ierr = VecCreate(PetscObjectComm((PetscObject)mat),right);CHKERRQ(ierr);
8761      ierr = VecSetSizes(*right,mat->cmap->n,PETSC_DETERMINE);CHKERRQ(ierr);
8762      ierr = VecSetBlockSize(*right,cbs);CHKERRQ(ierr);
8763      ierr = VecSetType(*right,mat->defaultvectype);CHKERRQ(ierr);
8764      ierr = PetscLayoutReference(mat->cmap,&(*right)->map);CHKERRQ(ierr);
8765    }
8766    if (left) {
8767      if (mat->rmap->n < 0) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"PetscLayout for rows not yet setup");
8768      ierr = VecCreate(PetscObjectComm((PetscObject)mat),left);CHKERRQ(ierr);
8769      ierr = VecSetSizes(*left,mat->rmap->n,PETSC_DETERMINE);CHKERRQ(ierr);
8770      ierr = VecSetBlockSize(*left,rbs);CHKERRQ(ierr);
8771      ierr = VecSetType(*left,mat->defaultvectype);CHKERRQ(ierr);
8772      ierr = PetscLayoutReference(mat->rmap,&(*left)->map);CHKERRQ(ierr);
8773    }
8774  }
8775  PetscFunctionReturn(0);
8776}
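/*
   Example of Usage (a minimal sketch; A is an assembled matrix, and x/y are created
   with layouts compatible with y = A*x):
.vb
     Vec x,y;
     ierr = MatCreateVecs(A,&x,&y);CHKERRQ(ierr); /* x: right vector, y: left vector */
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = MatMult(A,x,y);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
     ierr = VecDestroy(&y);CHKERRQ(ierr);
.ve
*/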
8777
8778/*@C
8779 MatFactorInfoInitialize - Initializes a MatFactorInfo data structure
8780 with default values.
8781
8782 Not Collective
8783
8784 Input Parameter:
8785. info - the MatFactorInfo data structure
8786
8787
8788 Notes:
8789 The solvers are generally used through the KSP and PC objects, for example
8790 PCLU, PCILU, PCCHOLESKY, PCICC
8791
8792 Level: developer
8793
8794.seealso: MatFactorInfo
8795
8796 Developer Note: the Fortran interface is not autogenerated as the F90
8797 interface definition cannot be generated correctly [due to MatFactorInfo]
8798
8799@*/
8800
8801PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
8802{
8803 PetscErrorCode ierr;
8804
8805  PetscFunctionBegin;
8806  ierr = PetscMemzero(info,sizeof(MatFactorInfo));CHKERRQ(ierr);
8807  PetscFunctionReturn(0);
8808}
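/*
   Example of Usage (a minimal sketch; A is a matrix suitable for in-place LU
   factorization and rowperm/colperm are placeholder orderings, e.g. from
   MatGetOrdering()):
.vb
     MatFactorInfo info;
     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
     info.fill = 2.0; /* override one default before factoring */
     ierr = MatLUFactor(A,rowperm,colperm,&info);CHKERRQ(ierr);
.ve
*/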
8809
8810/*@
8811 MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
8812
8813 Collective on Mat
8814
8815 Input Parameters:
8816+ mat - the factored matrix
8817- is - the index set defining the Schur indices (0-based)
8818
8819 Notes:
8820 Call MatFactorSolveSchurComplement() or MatFactorSolveSchurComplementTranspose() after this call to solve a Schur complement system.
8821
8822 You can call MatFactorGetSchurComplement() or MatFactorCreateSchurComplement() after this call.
8823
8824 Level: developer
8825
8826.seealso: MatGetFactor(), MatFactorGetSchurComplement(), MatFactorRestoreSchurComplement(), MatFactorCreateSchurComplement(), MatFactorSolveSchurComplement(),
8827 MatFactorSolveSchurComplementTranspose(), MatFactorSolveSchurComplement()
8828
8829@*/
8830PetscErrorCode MatFactorSetSchurIS(Mat mat,IS is)
8831{
8832 PetscErrorCode ierr,(*f)(Mat,IS);
8833
8834  PetscFunctionBegin;
8835  PetscValidType(mat,1);
8836  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8837  PetscValidType(is,2);
8838  PetscValidHeaderSpecific(is,IS_CLASSID,2);
8839  PetscCheckSameComm(mat,1,is,2);
8840  if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
8841  ierr = PetscObjectQueryFunction((PetscObject)mat,"MatFactorSetSchurIS_C",&f);CHKERRQ(ierr);
8842  if (!f) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
8843  if (mat->schur) {
8844    ierr = MatDestroy(&mat->schur);CHKERRQ(ierr);
8845  }
8846  ierr = (*f)(mat,is);CHKERRQ(ierr);
8847  if (!mat->schur) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_PLIB,"Schur complement has not been created");
8848  ierr = MatFactorSetUpInPlaceSchur_Private(mat);CHKERRQ(ierr);
8849  PetscFunctionReturn(0);
8850}
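/*
   Example of Usage (a minimal sketch assuming a solver package that supports Schur
   complements, e.g. MATSOLVERMUMPS, and an existing index set is of Schur indices):
.vb
     Mat           F;
     MatFactorInfo info;
     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
     ierr = MatGetFactor(A,MATSOLVERMUMPS,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
     ierr = MatFactorSetSchurIS(F,is);CHKERRQ(ierr);
     ierr = MatLUFactorSymbolic(F,A,NULL,NULL,&info);CHKERRQ(ierr);
     ierr = MatLUFactorNumeric(F,A,&info);CHKERRQ(ierr);
.ve
*/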
8851
8852/*@
8853 MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
8854
8855 Logically Collective on Mat
8856
8857 Input Parameters:
8858+ F - the factored matrix obtained by calling MatGetFactor() (e.g., from the PETSc-MUMPS interface)
8859. S - location where to return the Schur complement, can be NULL
8860- status - the status of the Schur complement matrix, can be NULL
8861
8862 Notes:
8863 You must call MatFactorSetSchurIS() before calling this routine.
8864
8865 The routine provides a copy of the Schur matrix stored within the solver data structures.
8866 The caller must destroy the object when it is no longer needed.
8867 If MatFactorInvertSchurComplement() has been called, the routine gets back the inverse.
8868
8869 Use MatFactorGetSchurComplement() to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
8870
8871 Developer Notes:
8872 This routine exists because the representation of the Schur complement within the factor matrix may differ from a standard PETSc
8873 matrix representation, and we normally do not want to spend the time or memory to make a copy as a regular PETSc matrix.
8874
8875 See MatCreateSchurComplement() or MatGetSchurComplement() for ways to create virtual or approximate Schur complements.
8876
8877 Level: advanced
8878
8881.seealso: MatGetFactor(), MatFactorSetSchurIS(), MatFactorGetSchurComplement(), MatFactorSchurStatus
8882@*/
8883PetscErrorCode MatFactorCreateSchurComplement(Mat F,Mat* S,MatFactorSchurStatus* status)
8884{
8885 PetscErrorCode ierr;
8886
8887  PetscFunctionBegin;
8888  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
8889  if (S) PetscValidPointer(S,2);
8890  if (status) PetscValidPointer(status,3);
8891  if (S) {
8892    PetscErrorCode (*f)(Mat,Mat*);
8893
8894    ierr = PetscObjectQueryFunction((PetscObject)F,"MatFactorCreateSchurComplement_C",&f);CHKERRQ(ierr);
8895    if (f) {
8896      ierr = (*f)(F,S);CHKERRQ(ierr);
8897    } else {
8898      ierr = MatDuplicate(F->schur,MAT_COPY_VALUES,S);CHKERRQ(ierr);
8899    }
8900  }
8901  if (status) *status = F->schur_status;
8902  PetscFunctionReturn(0);
8903}
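/*
   Example of Usage (a minimal sketch continuing the workflow above; F is a factored
   matrix for which MatFactorSetSchurIS() was called before factorization):
.vb
     Mat                  S;
     MatFactorSchurStatus status;
     ierr = MatFactorCreateSchurComplement(F,&S,&status);CHKERRQ(ierr);
     /* ... use the independent copy S ... */
     ierr = MatDestroy(&S);CHKERRQ(ierr);
.ve
*/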
8904
8905/*@
8906 MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
8907
8908 Logically Collective on Mat
8909
8910 Input Parameters:
8911+ F - the factored matrix obtained by calling MatGetFactor()
8912. S - location where to return the Schur complement, can be NULL
8913- status - the status of the Schur complement matrix, can be NULL
8914
8915 Notes:
8916 You must call MatFactorSetSchurIS() before calling this routine.
8917
8918 Schur complement mode is currently implemented for sequential matrices.
8919 The routine returns the Schur complement stored within the data structures of the solver.
8920 If MatFactorInvertSchurComplement() has previously been called, the returned matrix is actually the inverse of the Schur complement.
8921 The returned matrix should not be destroyed; the caller should call MatFactorRestoreSchurComplement() when the object is no longer needed.
8922
8923 Use MatFactorCreateSchurComplement() to create a copy of the Schur complement matrix that is within a factored matrix
8924
8925 See MatCreateSchurComplement() or MatGetSchurComplement() for ways to create virtual or approximate Schur complements.
8926
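 Example of Usage (a minimal sketch added for illustration; error checking with CHKERRQ() is omitted):
.vb
      Mat                  S;
      MatFactorSchurStatus status;
      MatFactorGetSchurComplement(F,&S,&status);
      /* use S here; do not destroy it */
      MatFactorRestoreSchurComplement(F,&S,status);
.ve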
8927 Level: advanced
8928
8929
8930
8931.seealso: MatGetFactor(), MatFactorSetSchurIS(), MatFactorRestoreSchurComplement(), MatFactorCreateSchurComplement(), MatFactorSchurStatus
8932@*/
8933PetscErrorCode MatFactorGetSchurComplement(Mat F,Mat* S,MatFactorSchurStatus* status)
8934{
8935 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 8935; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
8936 PetscValidHeaderSpecific(F,MAT_CLASSID,1)do { if (!F) return PetscError(((MPI_Comm)0x44000001),8936,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(F,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),8936,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(F))->classid != MAT_CLASSID) { if (
((PetscObject)(F))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),8936,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),8936,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
8937 if (S) PetscValidPointer(S,2)do { if (!S) return PetscError(((MPI_Comm)0x44000001),8937,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",2); if (!PetscCheckPointer(S,
PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),8937,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",2); } while (0)
;
8938 if (status) PetscValidPointer(status,3)do { if (!status) return PetscError(((MPI_Comm)0x44000001),8938
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(status,PETSC_CHAR)) return PetscError(((
MPI_Comm)0x44000001),8938,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",3);
} while (0)
;
8939 if (S) *S = F->schur;
8940 if (status) *status = F->schur_status;
8941 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
8942}
8943
8944/*@
8945 MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to MatFactorGetSchurComplement()
8946
8947 Logically Collective on Mat
8948
8949 Input Parameters:
8950+ F - the factored matrix obtained by calling MatGetFactor()
8951. *S - location where the Schur complement is stored
8952- status - the status of the Schur complement matrix (see MatFactorSchurStatus)
8953
8954 Notes:
8955 The routine resets *S to NULL and records the provided status in the factored matrix.
8956 Level: advanced
8957
8958
8959
8960.seealso: MatGetFactor(), MatFactorSetSchurIS(), MatFactorGetSchurComplement(), MatFactorCreateSchurComplement(), MatFactorSchurStatus
8961@*/
8962PetscErrorCode MatFactorRestoreSchurComplement(Mat F,Mat* S,MatFactorSchurStatus status)
8963{
8964 PetscErrorCode ierr;
8965
8966 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 8966; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
8967 PetscValidHeaderSpecific(F,MAT_CLASSID,1)do { if (!F) return PetscError(((MPI_Comm)0x44000001),8967,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(F,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),8967,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(F))->classid != MAT_CLASSID) { if (
((PetscObject)(F))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),8967,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),8967,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
8968 if (S) {
8969 PetscValidHeaderSpecific(*S,MAT_CLASSID,2)do { if (!*S) return PetscError(((MPI_Comm)0x44000001),8969,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(*S,
PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),8969,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(*S))->classid != MAT_CLASSID) { if
(((PetscObject)(*S))->classid == -1) return PetscError(((
MPI_Comm)0x44000001),8969,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),8969,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
8970 *S = NULL((void*)0);
8971 }
8972 F->schur_status = status;
8973 ierr = MatFactorUpdateSchurStatus_Private(F);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),8973,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
8974 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
8975}
8976
8977/*@
8978 MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
8979
8980 Logically Collective on Mat
8981
8982 Input Parameters:
8983+ F - the factored matrix obtained by calling MatGetFactor()
8984. rhs - location where the right hand side of the Schur complement system is stored
8985- sol - location where the solution of the Schur complement system is returned
8986
8987 Notes:
8988 The sizes of the vectors should match the size of the Schur complement
8989
8990 Must be called after MatFactorSetSchurIS()
8991
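 Example of Usage (a minimal sketch; rhs and sol are assumed to already match the Schur complement size, e.g. created with MatCreateVecs() on the matrix returned by MatFactorGetSchurComplement()):
.vb
      MatFactorSolveSchurComplementTranspose(F,rhs,sol);
.ve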
8992 Level: advanced
8993
8994
8995
8996.seealso: MatGetFactor(), MatFactorSetSchurIS(), MatFactorSolveSchurComplement()
8997@*/
8998PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
8999{
9000 PetscErrorCode ierr;
9001
9002 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9002; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9003 PetscValidType(F,1)do { if (!((PetscObject)F)->type_name) return PetscError((
(MPI_Comm)0x44000001),9003,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)F)->class_name,1); } while (0)
;
9004 PetscValidType(rhs,2)do { if (!((PetscObject)rhs)->type_name) return PetscError
(((MPI_Comm)0x44000001),9004,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)rhs)->class_name,2); } while (0)
;
9005 PetscValidType(sol,3)do { if (!((PetscObject)sol)->type_name) return PetscError
(((MPI_Comm)0x44000001),9005,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)sol)->class_name,3); } while (0)
;
9006 PetscValidHeaderSpecific(F,MAT_CLASSID,1)do { if (!F) return PetscError(((MPI_Comm)0x44000001),9006,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(F,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9006,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(F))->classid != MAT_CLASSID) { if (
((PetscObject)(F))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9006,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),9006,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
9007 PetscValidHeaderSpecific(rhs,VEC_CLASSID,2)do { if (!rhs) return PetscError(((MPI_Comm)0x44000001),9007,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",2); if (
!PetscCheckPointer(rhs,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),9007,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(rhs))->classid != VEC_CLASSID) { if
(((PetscObject)(rhs))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),9007,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),9007,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
9008 PetscValidHeaderSpecific(sol,VEC_CLASSID,3)do { if (!sol) return PetscError(((MPI_Comm)0x44000001),9008,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(sol,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),9008,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(sol))->classid != VEC_CLASSID) { if
(((PetscObject)(sol))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),9008,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),9008,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
9009 PetscCheckSameComm(F,1,rhs,2)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)F),PetscObjectComm((PetscObject
)rhs),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),9009,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),9009,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,__flag); } while (0)
;
9010 PetscCheckSameComm(F,1,sol,3)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)F),PetscObjectComm((PetscObject
)sol),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),9010,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),9010,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,3,__flag); } while (0)
;
9011 ierr = MatFactorFactorizeSchurComplement(F);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9011,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9012 switch (F->schur_status) {
9013 case MAT_FACTOR_SCHUR_FACTORED:
9014 ierr = MatSolveTranspose(F->schur,rhs,sol);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9014,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9015 break;
9016 case MAT_FACTOR_SCHUR_INVERTED:
9017 ierr = MatMultTranspose(F->schur,rhs,sol);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9017,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9018 break;
9019 default:
9020 SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Unhandled MatFactorSchurStatus %D",F->schur_status)return PetscError(PetscObjectComm((PetscObject)F),9020,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Unhandled MatFactorSchurStatus %D",F->schur_status)
;
9021 break;
9022 }
9023 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9024}
9025
9026/*@
9027 MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
9028
9029 Logically Collective on Mat
9030
9031 Input Parameters:
9032+ F - the factored matrix obtained by calling MatGetFactor()
9033. rhs - location where the right hand side of the Schur complement system is stored
9034- sol - location where the solution of the Schur complement system is returned
9035
9036 Notes:
9037 The sizes of the vectors should match the size of the Schur complement
9038
9039 Must be called after MatFactorSetSchurIS()
9040
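 Example of Usage (a minimal sketch; error checking is omitted, and the Schur complement is only borrowed here to create correctly sized vectors):
.vb
      Mat                  S;
      MatFactorSchurStatus status;
      Vec                  rhs,sol;
      MatFactorGetSchurComplement(F,&S,&status);
      MatCreateVecs(S,&sol,&rhs);
      MatFactorRestoreSchurComplement(F,&S,status);
      /* fill rhs, then solve the Schur complement system */
      MatFactorSolveSchurComplement(F,rhs,sol);
.ve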
9041 Level: advanced
9042
9043
9044
9045.seealso: MatGetFactor(), MatFactorSetSchurIS(), MatFactorSolveSchurComplementTranspose()
9046@*/
9047PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
9048{
9049 PetscErrorCode ierr;
9050
9051 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9051; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9052 PetscValidType(F,1)do { if (!((PetscObject)F)->type_name) return PetscError((
(MPI_Comm)0x44000001),9052,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)F)->class_name,1); } while (0)
;
9053 PetscValidType(rhs,2)do { if (!((PetscObject)rhs)->type_name) return PetscError
(((MPI_Comm)0x44000001),9053,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)rhs)->class_name,2); } while (0)
;
9054 PetscValidType(sol,3)do { if (!((PetscObject)sol)->type_name) return PetscError
(((MPI_Comm)0x44000001),9054,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)sol)->class_name,3); } while (0)
;
9055 PetscValidHeaderSpecific(F,MAT_CLASSID,1)do { if (!F) return PetscError(((MPI_Comm)0x44000001),9055,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(F,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9055,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(F))->classid != MAT_CLASSID) { if (
((PetscObject)(F))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9055,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),9055,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
9056 PetscValidHeaderSpecific(rhs,VEC_CLASSID,2)do { if (!rhs) return PetscError(((MPI_Comm)0x44000001),9056,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",2); if (
!PetscCheckPointer(rhs,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),9056,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(rhs))->classid != VEC_CLASSID) { if
(((PetscObject)(rhs))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),9056,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),9056,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
9057 PetscValidHeaderSpecific(sol,VEC_CLASSID,3)do { if (!sol) return PetscError(((MPI_Comm)0x44000001),9057,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(sol,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),9057,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(sol))->classid != VEC_CLASSID) { if
(((PetscObject)(sol))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),9057,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),9057,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
9058 PetscCheckSameComm(F,1,rhs,2)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)F),PetscObjectComm((PetscObject
)rhs),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),9058,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),9058,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,__flag); } while (0)
;
9059 PetscCheckSameComm(F,1,sol,3)do { PetscErrorCode _6_ierr,__flag; _6_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)F),PetscObjectComm((PetscObject
)sol),&__flag);do {if (__builtin_expect(!!(_6_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),9059,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_6_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (__flag != 1
&& __flag != 0) return PetscError(((MPI_Comm)0x44000001
),9059,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,3,__flag); } while (0)
;
9060 ierr = MatFactorFactorizeSchurComplement(F);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9060,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9061 switch (F->schur_status) {
9062 case MAT_FACTOR_SCHUR_FACTORED:
9063 ierr = MatSolve(F->schur,rhs,sol);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9063,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9064 break;
9065 case MAT_FACTOR_SCHUR_INVERTED:
9066 ierr = MatMult(F->schur,rhs,sol);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9066,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9067 break;
9068 default:
9069 SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Unhandled MatFactorSchurStatus %D",F->schur_status)return PetscError(PetscObjectComm((PetscObject)F),9069,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Unhandled MatFactorSchurStatus %D",F->schur_status)
;
9070 break;
9071 }
9072 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9073}
9074
9075/*@
9076 MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
9077
9078 Logically Collective on Mat
9079
9080 Input Parameter:
9081. F - the factored matrix obtained by calling MatGetFactor()
9082
9083 Notes:
9084 Must be called after MatFactorSetSchurIS().
9085
9086 Call MatFactorGetSchurComplement() or MatFactorCreateSchurComplement() AFTER this call to actually compute the inverse and get access to it.
9087
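 Example of Usage (a minimal sketch; error checking is omitted):
.vb
      Mat Sinv;
      MatFactorInvertSchurComplement(F);
      MatFactorCreateSchurComplement(F,&Sinv,NULL);  /* Sinv now holds a copy of the inverted Schur complement */
      /* use Sinv */
      MatDestroy(&Sinv);
.ve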
9088 Level: advanced
9089
9090
9091
9092.seealso: MatGetFactor(), MatFactorSetSchurIS(), MatFactorGetSchurComplement(), MatFactorCreateSchurComplement()
9093@*/
9094PetscErrorCode MatFactorInvertSchurComplement(Mat F)
9095{
9096 PetscErrorCode ierr;
9097
9098 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9098; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9099 PetscValidType(F,1)do { if (!((PetscObject)F)->type_name) return PetscError((
(MPI_Comm)0x44000001),9099,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)F)->class_name,1); } while (0)
;
9100 PetscValidHeaderSpecific(F,MAT_CLASSID,1)do { if (!F) return PetscError(((MPI_Comm)0x44000001),9100,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(F,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9100,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(F))->classid != MAT_CLASSID) { if (
((PetscObject)(F))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9100,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),9100,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
9101 if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9102 ierr = MatFactorFactorizeSchurComplement(F);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9102,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9103 ierr = MatFactorInvertSchurComplement_Private(F);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9103,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9104 F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
9105 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9106}
9107
9108/*@
9109 MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
9110
9111 Logically Collective on Mat
9112
9113 Input Parameter:
9114. F - the factored matrix obtained by calling MatGetFactor()
9115
9116 Notes:
9117 Must be called after MatFactorSetSchurIS(). The routine returns immediately if the Schur complement has already been factored or inverted.
9118
9119 Level: advanced
9120
9121
9122
9123.seealso: MatGetFactor(), MatFactorSetSchurIS(), MatFactorInvertSchurComplement()
9124@*/
9125PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
9126{
9127 PetscErrorCode ierr;
9128
9129 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9129; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9130 PetscValidType(F,1)do { if (!((PetscObject)F)->type_name) return PetscError((
(MPI_Comm)0x44000001),9130,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)F)->class_name,1); } while (0)
;
9131 PetscValidHeaderSpecific(F,MAT_CLASSID,1)do { if (!F) return PetscError(((MPI_Comm)0x44000001),9131,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(F,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9131,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(F))->classid != MAT_CLASSID) { if (
((PetscObject)(F))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9131,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),9131,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
9132 if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9133 ierr = MatFactorFactorizeSchurComplement_Private(F);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9133,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9134 F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
9135 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9136}
9137
9138static PetscErrorCode MatPtAP_Basic(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
9139{
9140 Mat AP;
9141 PetscErrorCode ierr;
9142
9143 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9143; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9144 ierr = PetscInfo2(A,"Mat types %s and %s using basic PtAP\n",((PetscObject)A)->type_name,((PetscObject)P)->type_name)PetscInfo_Private(__func__,A,"Mat types %s and %s using basic PtAP\n"
,((PetscObject)A)->type_name,((PetscObject)P)->type_name
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9144,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9145 ierr = MatMatMult(A,P,MAT_INITIAL_MATRIX,PETSC_DEFAULT-2,&AP);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9145,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9146 ierr = MatTransposeMatMult(P,AP,scall,fill,C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9146,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9147 ierr = MatDestroy(&AP);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9147,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9148 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9149}
9150
9151/*@
9152 MatPtAP - Creates the matrix product C = P^T * A * P
9153
9154 Neighbor-wise Collective on Mat
9155
9156 Input Parameters:
9157+ A - the matrix
9158. P - the projection matrix
9159. scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9160- fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use PETSC_DEFAULT if you do not have a good estimate
9161 if the result is a dense matrix this is irrelevant
9162
9163 Output Parameters:
9164. C - the product matrix
9165
9166 Notes:
9167 C will be created and must be destroyed by the user with MatDestroy().
9168
9169 For matrix types without a special implementation the function falls back to MatMatMult() followed by MatTransposeMatMult().
9170
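 Example of Usage (a minimal sketch; error checking is omitted, and the MAT_REUSE_MATRIX call assumes the nonzero structure of A and P has not changed):
.vb
      Mat C;
      MatPtAP(A,P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);
      /* ... change the numerical values in A or P ... */
      MatPtAP(A,P,MAT_REUSE_MATRIX,PETSC_DEFAULT,&C);
      MatDestroy(&C);
.ve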
9171 Level: intermediate
9172
9173.seealso: MatPtAPSymbolic(), MatPtAPNumeric(), MatMatMult(), MatRARt()
9174@*/
9175PetscErrorCode MatPtAP(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
9176{
9177 PetscErrorCode ierr;
9178 PetscErrorCode (*fA)(Mat,Mat,MatReuse,PetscReal,Mat*);
9179 PetscErrorCode (*fP)(Mat,Mat,MatReuse,PetscReal,Mat*);
9180 PetscErrorCode (*ptap)(Mat,Mat,MatReuse,PetscReal,Mat*)=NULL((void*)0);
9181 PetscBool sametype;
9182
9183 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9183; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9184 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),9184,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9184,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9184,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),9184,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
9185 PetscValidType(A,1)do { if (!((PetscObject)A)->type_name) return PetscError((
(MPI_Comm)0x44000001),9185,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)A)->class_name,1); } while (0)
;
9186 MatCheckPreallocated(A,1)do { if (__builtin_expect(!!(!(A)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9186,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"A",__func__); } while (0)
;
9187 if (scall == MAT_INPLACE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported")return PetscError(PetscObjectComm((PetscObject)A),9187,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Inplace product not supported")
;
9188 if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),9188,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9189 if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)A),9189,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9190 PetscValidHeaderSpecific(P,MAT_CLASSID,2)do { if (!P) return PetscError(((MPI_Comm)0x44000001),9190,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(P,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9190,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(P))->classid != MAT_CLASSID) { if (
((PetscObject)(P))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9190,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),9190,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
9191 PetscValidType(P,2)do { if (!((PetscObject)P)->type_name) return PetscError((
(MPI_Comm)0x44000001),9191,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)P)->class_name,2); } while (0)
;
9192 MatCheckPreallocated(P,2)do { if (__builtin_expect(!!(!(P)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9192,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(2),"P",__func__); } while (0)
;
9193 if (!P->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),9193,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9194 if (P->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)A),9194,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9195
9196 if (A->rmap->N != A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix A must be square, %D != %D",A->rmap->N,A->cmap->N)return PetscError(PetscObjectComm((PetscObject)A),9196,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Matrix A must be square, %D != %D",A->rmap->N,A->cmap
->N)
;
9197 if (P->rmap->N != A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",P->rmap->N,A->cmap->N)return PetscError(PetscObjectComm((PetscObject)A),9197,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Matrix dimensions are incompatible, %D != %D",P->rmap->
N,A->cmap->N)
;
9198 if (fill == PETSC_DEFAULT-2 || fill == PETSC_DECIDE-1) fill = 2.0;
9199 if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill)return PetscError(PetscObjectComm((PetscObject)A),9199,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Expected fill=%g must be >= 1.0",(double)fill)
;
9200
9201 if (scall == MAT_REUSE_MATRIX) {
9202 PetscValidPointer(*C,5)do { if (!*C) return PetscError(((MPI_Comm)0x44000001),9202,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",5); if (!PetscCheckPointer(*C
,PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),9202,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",5); } while (0)
;
9203 PetscValidHeaderSpecific(*C,MAT_CLASSID,5)do { if (!*C) return PetscError(((MPI_Comm)0x44000001),9203,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",5); if (!PetscCheckPointer(*C,
PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),9203,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,5); if (((PetscObject)(*C))->classid != MAT_CLASSID) { if
(((PetscObject)(*C))->classid == -1) return PetscError(((
MPI_Comm)0x44000001),9203,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,5); else return PetscError(((MPI_Comm)0x44000001),9203,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",5); } } while (0)
;
9204
9205 ierr = PetscLogEventBegin(MAT_PtAP,A,P,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_PtAP].active) ? (*PetscLogPLB)((MAT_PtAP),0,(PetscObject
)(A),(PetscObject)(P),(PetscObject)(0),(PetscObject)(0)) : 0 )
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9205,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9206 ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_PtAPNumeric].active) ? (*PetscLogPLB)((MAT_PtAPNumeric),
0,(PetscObject)(A),(PetscObject)(P),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9206,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9207 if ((*C)->ops->ptapnumeric) {
9208 ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9208,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9209 } else {
9210 ierr = MatPtAP_Basic(A,P,scall,fill,C);
Value stored to 'ierr' is never read
9211 }
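The statement above is the flagged line 9210: the PetscErrorCode returned by MatPtAP_Basic() is stored in 'ierr' but never tested. A minimal sketch of the likely fix, following the CHKERRQ() convention used by every other call in this function:

      ierr = MatPtAP_Basic(A,P,scall,fill,C);CHKERRQ(ierr);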
9212 ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_PtAPNumeric].active) ? (*PetscLogPLE)((MAT_PtAPNumeric),
0,(PetscObject)(A),(PetscObject)(P),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9212,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9213 ierr = PetscLogEventEnd(MAT_PtAP,A,P,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_PtAP].active) ? (*PetscLogPLE)((MAT_PtAP),0,(PetscObject
)(A),(PetscObject)(P),(PetscObject)(0),(PetscObject)(0)) : 0 )
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9213,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9214 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9215 }
9216
9217 if (fill == PETSC_DEFAULT-2 || fill == PETSC_DECIDE-1) fill = 2.0;
9218 if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill)return PetscError(PetscObjectComm((PetscObject)A),9218,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Expected fill=%g must be >= 1.0",(double)fill)
;
9219
9220 fA = A->ops->ptap;
9221 fP = P->ops->ptap;
9222 ierr = PetscStrcmp(((PetscObject)A)->type_name,((PetscObject)P)->type_name,&sametype);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9222,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9223 if (fP == fA && sametype) {
9224 ptap = fA;
9225 } else {
9226 /* dispatch based on the type of A and P from their PetscObject's PetscFunctionLists. */
9227 char ptapname[256];
9228 ierr = PetscStrncpy(ptapname,"MatPtAP_",sizeof(ptapname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9228,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9229 ierr = PetscStrlcat(ptapname,((PetscObject)A)->type_name,sizeof(ptapname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9229,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9230 ierr = PetscStrlcat(ptapname,"_",sizeof(ptapname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9230,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9231 ierr = PetscStrlcat(ptapname,((PetscObject)P)->type_name,sizeof(ptapname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9231,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9232 ierr = PetscStrlcat(ptapname,"_C",sizeof(ptapname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9232,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
; /* e.g., ptapname = "MatPtAP_seqdense_seqaij_C" */
9233 ierr = PetscObjectQueryFunction((PetscObject)P,ptapname,&ptap)PetscObjectQueryFunction_Private(((PetscObject)P),(ptapname),
(PetscVoidFunction*)(&ptap))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9233,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9234 }
9235
9236 if (!ptap) ptap = MatPtAP_Basic;
9237 ierr = PetscLogEventBegin(MAT_PtAP,A,P,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_PtAP].active) ? (*PetscLogPLB)((MAT_PtAP),0,(PetscObject
)(A),(PetscObject)(P),(PetscObject)(0),(PetscObject)(0)) : 0 )
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9237,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9238 ierr = (*ptap)(A,P,scall,fill,C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9238,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9239 ierr = PetscLogEventEnd(MAT_PtAP,A,P,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_PtAP].active) ? (*PetscLogPLE)((MAT_PtAP),0,(PetscObject
)(A),(PetscObject)(P),(PetscObject)(0),(PetscObject)(0)) : 0 )
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9239,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9240 if (A->symmetric_set && A->symmetric) {
9241 ierr = MatSetOption(*C,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9241,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9242 }
9243 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9244}
9245
9246/*@
9247 MatPtAPNumeric - Computes the matrix product C = P^T * A * P
9248
9249 Neighbor-wise Collective on Mat
9250
9251 Input Parameters:
9252+ A - the matrix
9253- P - the projection matrix
9254
9255 Output Parameters:
9256. C - the product matrix
9257
9258 Notes:
9259 C must have been created by calling MatPtAPSymbolic() and must be destroyed by
9260 the user using MatDestroy().
9261
9262 This routine is currently only implemented for pairs of AIJ matrices and classes
9263 which inherit from AIJ. C will be of type MATAIJ.
9264
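 Example of Usage (a minimal sketch; it assumes MatPtAPSymbolic() takes the arguments (A,P,fill,&C), and error checking is omitted):
.vb
      Mat C;
      MatPtAPSymbolic(A,P,PETSC_DEFAULT,&C);
      MatPtAPNumeric(A,P,C);
      /* ... change the numerical values in A or P ... */
      MatPtAPNumeric(A,P,C);
      MatDestroy(&C);
.ve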
9265 Level: intermediate
9266
9267.seealso: MatPtAP(), MatPtAPSymbolic(), MatMatMultNumeric()
9268@*/
9269PetscErrorCode MatPtAPNumeric(Mat A,Mat P,Mat C)
9270{
9271 PetscErrorCode ierr;
9272
9273 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9273; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9274 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),9274,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9274,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9274,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),9274,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
9275 PetscValidType(A,1)do { if (!((PetscObject)A)->type_name) return PetscError((
(MPI_Comm)0x44000001),9275,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)A)->class_name,1); } while (0)
;
9276  if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9277  if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9278  PetscValidHeaderSpecific(P,MAT_CLASSID,2);
9279  PetscValidType(P,2);
9280  MatCheckPreallocated(P,2);
9281  if (!P->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9282  if (P->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9283  PetscValidHeaderSpecific(C,MAT_CLASSID,3);
9284  PetscValidType(C,3);
9285  MatCheckPreallocated(C,3);
9286  if (C->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9287  if (P->cmap->N!=C->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",P->cmap->N,C->rmap->N);
9288  if (P->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",P->rmap->N,A->cmap->N);
9289  if (A->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix 'A' must be square, %D != %D",A->rmap->N,A->cmap->N);
9290  if (P->cmap->N!=C->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",P->cmap->N,C->cmap->N);
9291  MatCheckPreallocated(A,1);
9292
9293  if (!C->ops->ptapnumeric) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"MatPtAPNumeric implementation is missing. You should call MatPtAPSymbolic first");
9294  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
9295  ierr = (*C->ops->ptapnumeric)(A,P,C);CHKERRQ(ierr);
9296  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
9297  PetscFunctionReturn(0);
9298}
9299
9300/*@
9301 MatPtAPSymbolic - Creates the (i,j) structure of the matrix product C = P^T * A * P
9302
9303 Neighbor-wise Collective on Mat
9304
9305 Input Parameters:
9306+ A - the matrix
9307- P - the projection matrix
9308
9309  Output Parameter:
9310. C - the (i,j) structure of the product matrix
9311
9312 Notes:
9313 C will be created and must be destroyed by the user with MatDestroy().
9314
9315 This routine is currently only implemented for pairs of SeqAIJ matrices and classes
9316 which inherit from SeqAIJ. C will be of type MATSEQAIJ. The product is computed using
9317 this (i,j) structure by calling MatPtAPNumeric().
9318
9319 Level: intermediate
9320
9321.seealso: MatPtAP(), MatPtAPNumeric(), MatMatMultSymbolic()
9322@*/
9323PetscErrorCode MatPtAPSymbolic(Mat A,Mat P,PetscReal fill,Mat *C)
9324{
9325 PetscErrorCode ierr;
9326
9327  PetscFunctionBegin;
9328  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9329  PetscValidType(A,1);
9330  if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9331  if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9332  if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill);
9333  PetscValidHeaderSpecific(P,MAT_CLASSID,2);
9334  PetscValidType(P,2);
9335  MatCheckPreallocated(P,2);
9336  if (!P->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9337  if (P->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9338  PetscValidPointer(C,3);
9339
9340  if (P->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",P->rmap->N,A->cmap->N);
9341  if (A->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix 'A' must be square, %D != %D",A->rmap->N,A->cmap->N);
9342  MatCheckPreallocated(A,1);
9343
9344  if (!A->ops->ptapsymbolic) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MatType %s",((PetscObject)A)->type_name);
9345  ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
9346  ierr = (*A->ops->ptapsymbolic)(A,P,fill,C);CHKERRQ(ierr);
9347  ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
9348
9349  /* ierr = MatSetBlockSize(*C,A->rmap->bs);CHKERRQ(ierr); NO! this is not always true -ma */
9350  PetscFunctionReturn(0);
9351}
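
For orientation, here is a minimal sketch of how the symbolic and numeric phases above are intended to be combined. This is an illustration only: A and P are assumed to be assembled SeqAIJ matrices of compatible sizes, nsteps is a placeholder, and fill=2.0 is just a sample estimate of how much larger nnz(C) will be than the input nonzeros (see the fill discussions in MatRARt() and MatMatMult() below).

.vb
   Mat      C;
   PetscInt i;
   MatPtAPSymbolic(A,P,2.0,&C);    /* allocate the (i,j) structure of P^T*A*P once */
   for (i=0; i<nsteps; i++) {
     /* the numerical values of A may change here, its nonzero pattern may not */
     MatPtAPNumeric(A,P,C);        /* refill C, reusing the stored structure */
   }
   MatDestroy(&C);
.ve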
9352
9353/*@
9354 MatRARt - Creates the matrix product C = R * A * R^T
9355
9356 Neighbor-wise Collective on Mat
9357
9358 Input Parameters:
9359+ A - the matrix
9360. R - the projection matrix
9361. scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9362- fill - expected fill as ratio of nnz(C)/nnz(A), use PETSC_DEFAULT if you do not have a good estimate
9363         if the result is a dense matrix this is irrelevant
9364
9365  Output Parameter:
9366. C - the product matrix
9367
9368 Notes:
9369 C will be created and must be destroyed by the user with MatDestroy().
9370
9371 This routine is currently only implemented for pairs of AIJ matrices and classes
9372 which inherit from AIJ. Due to PETSc sparse matrix block row distribution among processes,
9373 parallel MatRARt is implemented via explicit transpose of R, which could be very expensive.
9374 We recommend using MatPtAP().
9375
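   Example of Usage (a minimal sketch; A and R are assumed to be assembled AIJ matrices with the column count of R equal to the row count of A):
.vb
   Mat C;
   MatRARt(A,R,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);
   /* ... change the numerical values of A, keeping its nonzero pattern ... */
   MatRARt(A,R,MAT_REUSE_MATRIX,PETSC_DEFAULT,&C);
   MatDestroy(&C);
.ve
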
9376 Level: intermediate
9377
9378.seealso: MatRARtSymbolic(), MatRARtNumeric(), MatMatMult(), MatPtAP()
9379@*/
9380PetscErrorCode MatRARt(Mat A,Mat R,MatReuse scall,PetscReal fill,Mat *C)
9381{
9382 PetscErrorCode ierr;
9383
9384  PetscFunctionBegin;
9385  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9386  PetscValidType(A,1);
9387  if (scall == MAT_INPLACE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported");
9388  if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9389  if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9390  PetscValidHeaderSpecific(R,MAT_CLASSID,2);
9391  PetscValidType(R,2);
9392  MatCheckPreallocated(R,2);
9393  if (!R->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9394  if (R->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9395  PetscValidPointer(C,3);
9396  if (R->cmap->N!=A->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)R),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",R->cmap->N,A->rmap->N);
9397
9398  if (fill == PETSC_DEFAULT || fill == PETSC_DECIDE) fill = 2.0;
9399  if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill);
9400  MatCheckPreallocated(A,1);
9401
9402  if (!A->ops->rart) {
9403    Mat Rt;
9404    ierr = MatTranspose(R,MAT_INITIAL_MATRIX,&Rt);CHKERRQ(ierr);
9405    ierr = MatMatMatMult(R,A,Rt,scall,fill,C);CHKERRQ(ierr);
9406    ierr = MatDestroy(&Rt);CHKERRQ(ierr);
9407    PetscFunctionReturn(0);
9408  }
9409  ierr = PetscLogEventBegin(MAT_RARt,A,R,0,0);CHKERRQ(ierr);
9410  ierr = (*A->ops->rart)(A,R,scall,fill,C);CHKERRQ(ierr);
9411  ierr = PetscLogEventEnd(MAT_RARt,A,R,0,0);CHKERRQ(ierr);
9412  PetscFunctionReturn(0);
9413}
9414
9415/*@
9416 MatRARtNumeric - Computes the matrix product C = R * A * R^T
9417
9418 Neighbor-wise Collective on Mat
9419
9420 Input Parameters:
9421+ A - the matrix
9422- R - the projection matrix
9423
9424  Output Parameter:
9425. C - the product matrix
9426
9427 Notes:
9428   C must have been created by calling MatRARtSymbolic() and must be destroyed by
9429 the user using MatDestroy().
9430
9431 This routine is currently only implemented for pairs of AIJ matrices and classes
9432 which inherit from AIJ. C will be of type MATAIJ.
9433
9434 Level: intermediate
9435
9436.seealso: MatRARt(), MatRARtSymbolic(), MatMatMultNumeric()
9437@*/
9438PetscErrorCode MatRARtNumeric(Mat A,Mat R,Mat C)
9439{
9440 PetscErrorCode ierr;
9441
9442  PetscFunctionBegin;
9443  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9444  PetscValidType(A,1);
9445  if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9446  if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9447  PetscValidHeaderSpecific(R,MAT_CLASSID,2);
9448  PetscValidType(R,2);
9449  MatCheckPreallocated(R,2);
9450  if (!R->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9451  if (R->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9452  PetscValidHeaderSpecific(C,MAT_CLASSID,3);
9453  PetscValidType(C,3);
9454  MatCheckPreallocated(C,3);
9455  if (C->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9456  if (R->rmap->N!=C->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",R->rmap->N,C->rmap->N);
9457  if (R->cmap->N!=A->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",R->cmap->N,A->rmap->N);
9458  if (A->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix 'A' must be square, %D != %D",A->rmap->N,A->cmap->N);
9459  if (R->rmap->N!=C->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",R->rmap->N,C->cmap->N);
9460  MatCheckPreallocated(A,1);
9461
9462  ierr = PetscLogEventBegin(MAT_RARtNumeric,A,R,0,0);CHKERRQ(ierr);
9463  ierr = (*A->ops->rartnumeric)(A,R,C);CHKERRQ(ierr);
9464  ierr = PetscLogEventEnd(MAT_RARtNumeric,A,R,0,0);CHKERRQ(ierr);
9465  PetscFunctionReturn(0);
9466}
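
For concreteness, the dimension tests at lines 9456-9459 encode the shape algebra of the product: if R is m-by-n (global sizes), then A must be n-by-n and C = R * A * R^T must be m-by-m. The four checks are exactly rows(C) = rows(R), cols(R) = rows(A), rows(A) = cols(A), and cols(C) = rows(R).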
9467
9468/*@
9469 MatRARtSymbolic - Creates the (i,j) structure of the matrix product C = R * A * R^T
9470
9471 Neighbor-wise Collective on Mat
9472
9473 Input Parameters:
9474+ A - the matrix
9475- R - the projection matrix
9476
9477  Output Parameter:
9478. C - the (i,j) structure of the product matrix
9479
9480 Notes:
9481 C will be created and must be destroyed by the user with MatDestroy().
9482
9483 This routine is currently only implemented for pairs of SeqAIJ matrices and classes
9484 which inherit from SeqAIJ. C will be of type MATSEQAIJ. The product is computed using
9485 this (i,j) structure by calling MatRARtNumeric().
9486
9487 Level: intermediate
9488
9489.seealso: MatRARt(), MatRARtNumeric(), MatMatMultSymbolic()
9490@*/
9491PetscErrorCode MatRARtSymbolic(Mat A,Mat R,PetscReal fill,Mat *C)
9492{
9493 PetscErrorCode ierr;
9494
9495  PetscFunctionBegin;
9496  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9497  PetscValidType(A,1);
9498  if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9499  if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9500  if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill);
9501  PetscValidHeaderSpecific(R,MAT_CLASSID,2);
9502  PetscValidType(R,2);
9503  MatCheckPreallocated(R,2);
9504  if (!R->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9505  if (R->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9506  PetscValidPointer(C,3);
9507
9508  if (R->cmap->N!=A->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",R->cmap->N,A->rmap->N);
9509  if (A->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix 'A' must be square, %D != %D",A->rmap->N,A->cmap->N);
9510  MatCheckPreallocated(A,1);
9511  ierr = PetscLogEventBegin(MAT_RARtSymbolic,A,R,0,0);CHKERRQ(ierr);
9512  ierr = (*A->ops->rartsymbolic)(A,R,fill,C);CHKERRQ(ierr);
9513  ierr = PetscLogEventEnd(MAT_RARtSymbolic,A,R,0,0);CHKERRQ(ierr);
9514
9515  ierr = MatSetBlockSizes(*C,PetscAbs(R->rmap->bs),PetscAbs(R->rmap->bs));CHKERRQ(ierr);
9516  PetscFunctionReturn(0);
9518
9519/*@
9520 MatMatMult - Performs Matrix-Matrix Multiplication C=A*B.
9521
9522 Neighbor-wise Collective on Mat
9523
9524 Input Parameters:
9525+ A - the left matrix
9526. B - the right matrix
9527. scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9528- fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use PETSC_DEFAULT if you do not have a good estimate
9529         if the result is a dense matrix this is irrelevant
9530
9531  Output Parameter:
9532. C - the product matrix
9533
9534 Notes:
9535   Unless scall is MAT_REUSE_MATRIX, C will be created.
9536
9537   MAT_REUSE_MATRIX can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
9538   call to this function with either MAT_INITIAL_MATRIX or MatMatMultSymbolic().
9539
9540 To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
9541 actually needed.
9542
9543 If you have many matrices with the same non-zero structure to multiply, you
9544 should either
9545$ 1) use MAT_REUSE_MATRIX in all calls but the first or
9546$ 2) call MatMatMultSymbolic() once and then MatMatMultNumeric() for each product needed
9547   In the special case where matrix B (and hence C) is dense you can create the correctly sized matrix C yourself and then call this routine
9548 with MAT_REUSE_MATRIX, rather than first having MatMatMult() create it for you. You can NEVER do this if the matrix C is sparse.
9549
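   Example of Usage (a minimal sketch; A and B are assumed assembled, with the column count of A equal to the row count of B):
.vb
   Mat C;
   MatMatMult(A,B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);
   /* ... new numerical values in A and/or B, same nonzero patterns ... */
   MatMatMult(A,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&C);
   MatDestroy(&C);
.ve
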
9550 Level: intermediate
9551
9552.seealso: MatMatMultSymbolic(), MatMatMultNumeric(), MatTransposeMatMult(), MatMatTransposeMult(), MatPtAP()
9553@*/
9554PetscErrorCode MatMatMult(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
9555{
9556 PetscErrorCode ierr;
9557 PetscErrorCode (*fA)(Mat,Mat,MatReuse,PetscReal,Mat*);
9558 PetscErrorCode (*fB)(Mat,Mat,MatReuse,PetscReal,Mat*);
9559  PetscErrorCode (*mult)(Mat,Mat,MatReuse,PetscReal,Mat*)=NULL;
9560 Mat T;
9561 PetscBool istrans;
9562
9563  PetscFunctionBegin;
9564  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9565  PetscValidType(A,1);
9566  MatCheckPreallocated(A,1);
9567  if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9568  if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9569  PetscValidHeaderSpecific(B,MAT_CLASSID,2);
9570  PetscValidType(B,2);
9571  MatCheckPreallocated(B,2);
9572  if (!B->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9573  if (B->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9574  PetscValidPointer(C,3);
9575  if (scall == MAT_INPLACE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported");
9576  if (B->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",B->rmap->N,A->cmap->N);
9577  ierr = PetscObjectTypeCompare((PetscObject)A,MATTRANSPOSEMAT,&istrans);CHKERRQ(ierr);
9578  if (istrans) {
9579    ierr = MatTransposeGetMat(A,&T);CHKERRQ(ierr);
9580    ierr = MatTransposeMatMult(T,B,scall,fill,C);CHKERRQ(ierr);
9581    PetscFunctionReturn(0);
9582  } else {
9583    ierr = PetscObjectTypeCompare((PetscObject)B,MATTRANSPOSEMAT,&istrans);CHKERRQ(ierr);
9584    if (istrans) {
9585      ierr = MatTransposeGetMat(B,&T);CHKERRQ(ierr);
9586      ierr = MatMatTransposeMult(A,T,scall,fill,C);CHKERRQ(ierr);
9587      PetscFunctionReturn(0);
9588    }
9589  }
9590  if (scall == MAT_REUSE_MATRIX) {
9591    PetscValidPointer(*C,5);
9592    PetscValidHeaderSpecific(*C,MAT_CLASSID,5);
9593    ierr = PetscLogEventBegin(MAT_MatMult,A,B,0,0);CHKERRQ(ierr);
9594    ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
9595    ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
9596    ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
9597    ierr = PetscLogEventEnd(MAT_MatMult,A,B,0,0);CHKERRQ(ierr);
9598    PetscFunctionReturn(0);
9599  }
9600  if (fill == PETSC_DEFAULT || fill == PETSC_DECIDE) fill = 2.0;
9601  if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill);
9602
9603  fA = A->ops->matmult;
9604  fB = B->ops->matmult;
9605  if (fB == fA) {
9606    if (!fB) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MatMatMult not supported for B of type %s",((PetscObject)B)->type_name);
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"MatMatMult not supported for B of type %s",((PetscObject)B)
->type_name)
;
9607 mult = fB;
9608 } else {
9609 /* dispatch based on the type of A and B from their PetscObjects' PetscFunctionLists. */
9610 char multname[256];
9611 ierr = PetscStrncpy(multname,"MatMatMult_",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9611,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9612 ierr = PetscStrlcat(multname,((PetscObject)A)->type_name,sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9612,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9613 ierr = PetscStrlcat(multname,"_",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9613,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9614 ierr = PetscStrlcat(multname,((PetscObject)B)->type_name,sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9614,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9615 ierr = PetscStrlcat(multname,"_C",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9615,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
; /* e.g., multname = "MatMatMult_seqdense_seqaij_C" */
9616 ierr = PetscObjectQueryFunction((PetscObject)B,multname,&mult)PetscObjectQueryFunction_Private(((PetscObject)B),(multname),
(PetscVoidFunction*)(&mult))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9616,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9617 if (!mult) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MatMatMult requires A, %s, to be compatible with B, %s",((PetscObject)A)->type_name,((PetscObject)B)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9617,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",75,PETSC_ERROR_INITIAL
,"MatMatMult requires A, %s, to be compatible with B, %s",((PetscObject
)A)->type_name,((PetscObject)B)->type_name)
;
9618 }
9619 ierr = PetscLogEventBegin(MAT_MatMult,A,B,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatMult].active) ? (*PetscLogPLB)((MAT_MatMult),0,(PetscObject
)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject)(0)) : 0 )
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9619,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9620 ierr = (*mult)(A,B,scall,fill,C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9620,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9621 ierr = PetscLogEventEnd(MAT_MatMult,A,B,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatMult].active) ? (*PetscLogPLE)((MAT_MatMult),0,(PetscObject
)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject)(0)) : 0 )
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9621,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9622 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9623}
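The dispatch above composes the string "MatMatMult_<Atype>_<Btype>_C" and queries it on B with PetscObjectQueryFunction(). A minimal sketch of how such an entry could be attached to a matrix, assuming a hypothetical implementation MatMatMult_seqaij_mytype() for a hypothetical type name mytype (both names are illustrative, not PETSc API):
.vb
 /* Sketch only: mytype and MatMatMult_seqaij_mytype() are illustrative names */
 extern PetscErrorCode MatMatMult_seqaij_mytype(Mat,Mat,MatReuse,PetscReal,Mat*);

 ierr = PetscObjectComposeFunction((PetscObject)B,"MatMatMult_seqaij_mytype_C",
                                   MatMatMult_seqaij_mytype);CHKERRQ(ierr);
 /* MatMatMult(A,B,scall,fill,&C) with A of type seqaij and B of type mytype
    now resolves to the composed routine via PetscObjectQueryFunction() */
.ve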
9624
9625/*@
9626   MatMatMultSymbolic - Constructs, preallocates, and computes the ij structure of
9627   the matrix-matrix product C=A*B. Call this routine before calling MatMatMultNumeric().
9628
9629 Neighbor-wise Collective on Mat
9630
9631 Input Parameters:
9632+ A - the left matrix
9633. B - the right matrix
9634-  fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)); use PETSC_DEFAULT if you do not have a good estimate.
9635          If C is a dense matrix this is irrelevant.
9636
9637   Output Parameter:
9638. C - the product matrix
9639
9640 Notes:
9641 To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
9642 actually needed.
9643
9644 This routine is currently implemented for
9645    - pairs of AIJ matrices and classes which inherit from AIJ; C will be of type AIJ.
9646    - pairs of AIJ (A) and Dense (B) matrices; C will be of type Dense.
9647    - pairs of Dense (A) and AIJ (B) matrices; C will be of type Dense.
9648
9649 Level: intermediate
9650
9651   Developer Notes: There are ways to estimate the number of nonzeros in the resulting product; see, for example, https://arxiv.org/abs/1006.4173
9652 We should incorporate them into PETSc.
9653
9654.seealso: MatMatMult(), MatMatMultNumeric()
9655@*/
9656PetscErrorCode MatMatMultSymbolic(Mat A,Mat B,PetscReal fill,Mat *C)
9657{
9658 PetscErrorCode ierr;
9659 PetscErrorCode (*Asymbolic)(Mat,Mat,PetscReal,Mat*);
9660 PetscErrorCode (*Bsymbolic)(Mat,Mat,PetscReal,Mat*);
9661 PetscErrorCode (*symbolic)(Mat,Mat,PetscReal,Mat*)=NULL((void*)0);
9662
9663 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9663; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9664 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),9664,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9664,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9664,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),9664,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
9665 PetscValidType(A,1)do { if (!((PetscObject)A)->type_name) return PetscError((
(MPI_Comm)0x44000001),9665,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)A)->class_name,1); } while (0)
;
9666 if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),9666,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9667 if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)A),9667,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9668
9669 PetscValidHeaderSpecific(B,MAT_CLASSID,2)do { if (!B) return PetscError(((MPI_Comm)0x44000001),9669,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(B,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9669,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(B))->classid != MAT_CLASSID) { if (
((PetscObject)(B))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9669,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),9669,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
9670 PetscValidType(B,2)do { if (!((PetscObject)B)->type_name) return PetscError((
(MPI_Comm)0x44000001),9670,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)B)->class_name,2); } while (0)
;
9671 MatCheckPreallocated(B,2)do { if (__builtin_expect(!!(!(B)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9671,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(2),"B",__func__); } while (0)
;
9672 if (!B->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),9672,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9673 if (B->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)A),9673,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9674   PetscValidPointer(C,4)do { if (!C) return PetscError(((MPI_Comm)0x44000001),9674,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",4); if (!PetscCheckPointer(C,
PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),9674,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",4); } while (0)
;
9675
9676 if (B->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",B->rmap->N,A->cmap->N)return PetscError(PetscObjectComm((PetscObject)A),9676,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Matrix dimensions are incompatible, %D != %D",B->rmap->
N,A->cmap->N)
;
9677 if (fill == PETSC_DEFAULT-2) fill = 2.0;
9678   if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill)return PetscError(PetscObjectComm((PetscObject)A),9678,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Expected fill=%g must be >= 1.0",(double)fill)
;
9679 MatCheckPreallocated(A,1)do { if (__builtin_expect(!!(!(A)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9679,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"A",__func__); } while (0)
;
9680
9681 Asymbolic = A->ops->matmultsymbolic;
9682 Bsymbolic = B->ops->matmultsymbolic;
9683 if (Asymbolic == Bsymbolic) {
9684 if (!Bsymbolic) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"C=A*B not implemented for B of type %s",((PetscObject)B)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9684,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"C=A*B not implemented for B of type %s",((PetscObject)B)->
type_name)
;
9685 symbolic = Bsymbolic;
9686 } else { /* dispatch based on the type of A and B */
9687 char symbolicname[256];
9688 ierr = PetscStrncpy(symbolicname,"MatMatMultSymbolic_",sizeof(symbolicname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9688,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9689 ierr = PetscStrlcat(symbolicname,((PetscObject)A)->type_name,sizeof(symbolicname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9689,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9690 ierr = PetscStrlcat(symbolicname,"_",sizeof(symbolicname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9690,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9691 ierr = PetscStrlcat(symbolicname,((PetscObject)B)->type_name,sizeof(symbolicname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9691,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9692 ierr = PetscStrlcat(symbolicname,"_C",sizeof(symbolicname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9692,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9693 ierr = PetscObjectQueryFunction((PetscObject)B,symbolicname,&symbolic)PetscObjectQueryFunction_Private(((PetscObject)B),(symbolicname
),(PetscVoidFunction*)(&symbolic))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9693,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9694 if (!symbolic) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MatMatMultSymbolic requires A, %s, to be compatible with B, %s",((PetscObject)A)->type_name,((PetscObject)B)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9694,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",75,PETSC_ERROR_INITIAL
,"MatMatMultSymbolic requires A, %s, to be compatible with B, %s"
,((PetscObject)A)->type_name,((PetscObject)B)->type_name
)
;
9695 }
9696 ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatMultSymbolic].active) ? (*PetscLogPLB)((MAT_MatMultSymbolic
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9696,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9697 ierr = (*symbolic)(A,B,fill,C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9697,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9698 ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatMultSymbolic].active) ? (*PetscLogPLE)((MAT_MatMultSymbolic
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9698,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9699 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9700}
9701
9702/*@
9703 MatMatMultNumeric - Performs the numeric matrix-matrix product.
9704 Call this routine after first calling MatMatMultSymbolic().
9705
9706 Neighbor-wise Collective on Mat
9707
9708 Input Parameters:
9709+ A - the left matrix
9710- B - the right matrix
9711
9712   Output Parameter:
9713.  C - the product matrix, which was created with MatMatMultSymbolic() or a call to MatMatMult().
9714
9715 Notes:
9716 C must have been created with MatMatMultSymbolic().
9717
9718 This routine is currently implemented for
9719    - pairs of AIJ matrices and classes which inherit from AIJ; C will be of type MATAIJ.
9720    - pairs of AIJ (A) and Dense (B) matrices; C will be of type Dense.
9721    - pairs of Dense (A) and AIJ (B) matrices; C will be of type Dense.
9722
9723 Level: intermediate
9724
9725.seealso: MatMatMult(), MatMatMultSymbolic()
9726@*/
9727PetscErrorCode MatMatMultNumeric(Mat A,Mat B,Mat C)
9728{
9729 PetscErrorCode ierr;
9730
9731 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9731; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9732 ierr = MatMatMult(A,B,MAT_REUSE_MATRIX,0.0,&C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9732,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9733 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9734}
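As the body above shows, MatMatMultNumeric(A,B,C) is equivalent to MatMatMult(A,B,MAT_REUSE_MATRIX,0.0,&C). A minimal sketch of the symbolic/numeric split for repeated products whose operands change values but not nonzero structure (A and B are assumed assembled and dimensionally compatible):
.vb
 Mat C;
 ierr = MatMatMultSymbolic(A,B,PETSC_DEFAULT,&C);CHKERRQ(ierr); /* structure of C */
 ierr = MatMatMultNumeric(A,B,C);CHKERRQ(ierr);                 /* values of C */
 /* ... modify entries of A and/or B without changing their patterns ... */
 ierr = MatMatMultNumeric(A,B,C);CHKERRQ(ierr);                 /* reuses structure */
 ierr = MatDestroy(&C);CHKERRQ(ierr);
.ve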
9735
9736/*@
9737 MatMatTransposeMult - Performs Matrix-Matrix Multiplication C=A*B^T.
9738
9739 Neighbor-wise Collective on Mat
9740
9741 Input Parameters:
9742+ A - the left matrix
9743. B - the right matrix
9744. scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9745- fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use PETSC_DEFAULT if not known
9746
9747   Output Parameter:
9748. C - the product matrix
9749
9750 Notes:
9751   C will be created if scall is MAT_INITIAL_MATRIX and must be destroyed by the user with MatDestroy().
9752
9753 MAT_REUSE_MATRIX can only be used if the matrices A and B have the same nonzero pattern as in the previous call
9754
9755 To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
9756 actually needed.
9757
9758 This routine is currently only implemented for pairs of SeqAIJ matrices, for the SeqDense class,
9759 and for pairs of MPIDense matrices.
9760
9761 Options Database Keys:
9762+  -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for MPIDense matrices: the
9763    first redundantly copies the transposed B matrix on each process and requires O(log P) communication complexity;
9764    the second never stores more than one portion of the B matrix at a time but requires O(P) communication complexity.
9765
9766 Level: intermediate
9767
9768.seealso: MatMatTransposeMultSymbolic(), MatMatTransposeMultNumeric(), MatMatMult(), MatTransposeMatMult(), MatPtAP()
9769@*/
9770PetscErrorCode MatMatTransposeMult(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
9771{
9772 PetscErrorCode ierr;
9773 PetscErrorCode (*fA)(Mat,Mat,MatReuse,PetscReal,Mat*);
9774 PetscErrorCode (*fB)(Mat,Mat,MatReuse,PetscReal,Mat*);
9775 Mat T;
9776 PetscBool istrans;
9777
9778 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9778; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9779 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),9779,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9779,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9779,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),9779,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
9780 PetscValidType(A,1)do { if (!((PetscObject)A)->type_name) return PetscError((
(MPI_Comm)0x44000001),9780,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)A)->class_name,1); } while (0)
;
9781 if (scall == MAT_INPLACE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported")return PetscError(PetscObjectComm((PetscObject)A),9781,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Inplace product not supported")
;
9782 if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),9782,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9783 if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)A),9783,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9784 PetscValidHeaderSpecific(B,MAT_CLASSID,2)do { if (!B) return PetscError(((MPI_Comm)0x44000001),9784,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(B,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9784,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(B))->classid != MAT_CLASSID) { if (
((PetscObject)(B))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9784,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),9784,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
9785 PetscValidType(B,2)do { if (!((PetscObject)B)->type_name) return PetscError((
(MPI_Comm)0x44000001),9785,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)B)->class_name,2); } while (0)
;
9786 MatCheckPreallocated(B,2)do { if (__builtin_expect(!!(!(B)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9786,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(2),"B",__func__); } while (0)
;
9787 if (!B->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),9787,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9788 if (B->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)A),9788,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9789   PetscValidPointer(C,5)do { if (!C) return PetscError(((MPI_Comm)0x44000001),9789,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",5); if (!PetscCheckPointer(C,
PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),9789,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",5); } while (0)
;
9790 if (B->cmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, AN %D != BN %D",A->cmap->N,B->cmap->N)return PetscError(PetscObjectComm((PetscObject)A),9790,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Matrix dimensions are incompatible, AN %D != BN %D",A->cmap
->N,B->cmap->N)
;
9791 if (fill == PETSC_DEFAULT-2 || fill == PETSC_DECIDE-1) fill = 2.0;
9792   if (fill == PETSC_DEFAULT-2 || fill == PETSC_DECIDE-1) fill = 2.0;
9793   if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill)return PetscError(PetscObjectComm((PetscObject)A),9792,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Expected fill=%g must be >= 1.0",(double)fill)
;
9793 MatCheckPreallocated(A,1)do { if (__builtin_expect(!!(!(A)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9793,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"A",__func__); } while (0)
;
9794
9795 ierr = PetscObjectTypeCompare((PetscObject)B,MATTRANSPOSEMAT"transpose",&istrans);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9795,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9796 if (istrans) {
9797 ierr = MatTransposeGetMat(B,&T);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9797,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9798 ierr = MatMatMult(A,T,scall,fill,C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9798,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9799 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9800 }
9801 fA = A->ops->mattransposemult;
9802 if (!fA) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MatMatTransposeMult not supported for A of type %s",((PetscObject)A)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9802,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"MatMatTransposeMult not supported for A of type %s",((PetscObject
)A)->type_name)
;
9803 fB = B->ops->mattransposemult;
9804 if (!fB) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MatMatTransposeMult not supported for B of type %s",((PetscObject)B)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9804,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"MatMatTransposeMult not supported for B of type %s",((PetscObject
)B)->type_name)
;
9805 if (fB!=fA) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MatMatTransposeMult requires A, %s, to be compatible with B, %s",((PetscObject)A)->type_name,((PetscObject)B)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9805,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",75,PETSC_ERROR_INITIAL
,"MatMatTransposeMult requires A, %s, to be compatible with B, %s"
,((PetscObject)A)->type_name,((PetscObject)B)->type_name
)
;
9806
9807 ierr = PetscLogEventBegin(MAT_MatTransposeMult,A,B,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTransposeMult].active) ? (*PetscLogPLB)((MAT_MatTransposeMult
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9807,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9808 if (scall == MAT_INITIAL_MATRIX) {
9809 ierr = PetscLogEventBegin(MAT_MatTransposeMultSymbolic,A,B,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTransposeMultSymbolic].active) ? (*PetscLogPLB)((MAT_MatTransposeMultSymbolic
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9809,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9810 ierr = (*A->ops->mattransposemultsymbolic)(A,B,fill,C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9810,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9811 ierr = PetscLogEventEnd(MAT_MatTransposeMultSymbolic,A,B,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTransposeMultSymbolic].active) ? (*PetscLogPLE)((MAT_MatTransposeMultSymbolic
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9811,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9812 }
9813 ierr = PetscLogEventBegin(MAT_MatTransposeMultNumeric,A,B,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTransposeMultNumeric].active) ? (*PetscLogPLB)((MAT_MatTransposeMultNumeric
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9813,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9814 ierr = (*A->ops->mattransposemultnumeric)(A,B,*C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9814,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9815 ierr = PetscLogEventEnd(MAT_MatTransposeMultNumeric,A,B,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTransposeMultNumeric].active) ? (*PetscLogPLE)((MAT_MatTransposeMultNumeric
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9815,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9816 ierr = PetscLogEventEnd(MAT_MatTransposeMult,A,B,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTransposeMult].active) ? (*PetscLogPLE)((MAT_MatTransposeMult
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9816,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9817 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9818}
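A minimal sketch of the create/reuse pattern described in the Notes above, assuming A and B are assembled, of a supported type, and have the same number of columns:
.vb
 Mat C;
 ierr = MatMatTransposeMult(A,B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);CHKERRQ(ierr); /* C = A*B^T */
 /* ... update values of A and/or B, keeping their nonzero patterns ... */
 ierr = MatMatTransposeMult(A,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&C);CHKERRQ(ierr);
 ierr = MatDestroy(&C);CHKERRQ(ierr);
.ve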
9819
9820/*@
9821 MatTransposeMatMult - Performs Matrix-Matrix Multiplication C=A^T*B.
9822
9823 Neighbor-wise Collective on Mat
9824
9825 Input Parameters:
9826+ A - the left matrix
9827. B - the right matrix
9828. scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9829- fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use PETSC_DEFAULT if not known
9830
9831   Output Parameter:
9832. C - the product matrix
9833
9834 Notes:
9835   C will be created if scall is MAT_INITIAL_MATRIX and must be destroyed by the user with MatDestroy().
9836
9837 MAT_REUSE_MATRIX can only be used if the matrices A and B have the same nonzero pattern as in the previous call
9838
9839 To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
9840 actually needed.
9841
9842 This routine is currently implemented for pairs of AIJ matrices and pairs of SeqDense matrices and classes
9843   which inherit from SeqAIJ. C will be of the same type as the input matrices.
9844
9845 Level: intermediate
9846
9847.seealso: MatTransposeMatMultSymbolic(), MatTransposeMatMultNumeric(), MatMatMult(), MatMatTransposeMult(), MatPtAP()
9848@*/
9849PetscErrorCode MatTransposeMatMult(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
9850{
9851 PetscErrorCode ierr;
9852 PetscErrorCode (*fA)(Mat,Mat,MatReuse,PetscReal,Mat*);
9853 PetscErrorCode (*fB)(Mat,Mat,MatReuse,PetscReal,Mat*);
9854 PetscErrorCode (*transposematmult)(Mat,Mat,MatReuse,PetscReal,Mat*) = NULL((void*)0);
9855 Mat T;
9856 PetscBool istrans;
9857
9858 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9858; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9859 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),9859,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9859,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9859,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),9859,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
9860 PetscValidType(A,1)do { if (!((PetscObject)A)->type_name) return PetscError((
(MPI_Comm)0x44000001),9860,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)A)->class_name,1); } while (0)
;
9861 if (scall == MAT_INPLACE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported")return PetscError(PetscObjectComm((PetscObject)A),9861,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Inplace product not supported")
;
9862 if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),9862,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9863 if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)A),9863,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9864 PetscValidHeaderSpecific(B,MAT_CLASSID,2)do { if (!B) return PetscError(((MPI_Comm)0x44000001),9864,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(B,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9864,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(B))->classid != MAT_CLASSID) { if (
((PetscObject)(B))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9864,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),9864,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
9865 PetscValidType(B,2)do { if (!((PetscObject)B)->type_name) return PetscError((
(MPI_Comm)0x44000001),9865,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)B)->class_name,2); } while (0)
;
9866 MatCheckPreallocated(B,2)do { if (__builtin_expect(!!(!(B)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9866,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(2),"B",__func__); } while (0)
;
9867 if (!B->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),9867,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9868 if (B->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)A),9868,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9869   PetscValidPointer(C,5)do { if (!C) return PetscError(((MPI_Comm)0x44000001),9869,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",5); if (!PetscCheckPointer(C,
PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),9869,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",5); } while (0)
;
9870 if (B->rmap->N!=A->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",B->rmap->N,A->rmap->N)return PetscError(PetscObjectComm((PetscObject)A),9870,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Matrix dimensions are incompatible, %D != %D",B->rmap->
N,A->rmap->N)
;
9871 if (fill == PETSC_DEFAULT-2 || fill == PETSC_DECIDE-1) fill = 2.0;
9872   if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill)return PetscError(PetscObjectComm((PetscObject)A),9872,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Expected fill=%g must be >= 1.0",(double)fill)
;
9873 MatCheckPreallocated(A,1)do { if (__builtin_expect(!!(!(A)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9873,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"A",__func__); } while (0)
;
9874
9875 ierr = PetscObjectTypeCompare((PetscObject)A,MATTRANSPOSEMAT"transpose",&istrans);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9875,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9876 if (istrans) {
9877 ierr = MatTransposeGetMat(A,&T);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9877,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9878 ierr = MatMatMult(T,B,scall,fill,C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9878,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9879 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9880 }
9881 fA = A->ops->transposematmult;
9882 fB = B->ops->transposematmult;
9883 if (fB==fA) {
9884 if (!fA) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MatTransposeMatMult not supported for A of type %s",((PetscObject)A)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9884,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"MatTransposeMatMult not supported for A of type %s",((PetscObject
)A)->type_name)
;
9885 transposematmult = fA;
9886 } else {
9887     /* dispatch based on the type of A and B from their PetscObjects' PetscFunctionLists. */
9888 char multname[256];
9889 ierr = PetscStrncpy(multname,"MatTransposeMatMult_",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9889,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9890 ierr = PetscStrlcat(multname,((PetscObject)A)->type_name,sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9890,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9891 ierr = PetscStrlcat(multname,"_",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9891,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9892 ierr = PetscStrlcat(multname,((PetscObject)B)->type_name,sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9892,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9893 ierr = PetscStrlcat(multname,"_C",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9893,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
; /* e.g., multname = "MatTransposeMatMult_seqdense_seqaij_C" */
9894 ierr = PetscObjectQueryFunction((PetscObject)B,multname,&transposematmult)PetscObjectQueryFunction_Private(((PetscObject)B),(multname),
(PetscVoidFunction*)(&transposematmult))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9894,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9895 if (!transposematmult) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MatTransposeMatMult requires A, %s, to be compatible with B, %s",((PetscObject)A)->type_name,((PetscObject)B)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9895,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",75,PETSC_ERROR_INITIAL
,"MatTransposeMatMult requires A, %s, to be compatible with B, %s"
,((PetscObject)A)->type_name,((PetscObject)B)->type_name
)
;
9896 }
9897 ierr = PetscLogEventBegin(MAT_TransposeMatMult,A,B,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_TransposeMatMult].active) ? (*PetscLogPLB)((MAT_TransposeMatMult
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9897,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9898 ierr = (*transposematmult)(A,B,scall,fill,C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9898,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9899 ierr = PetscLogEventEnd(MAT_TransposeMatMult,A,B,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_TransposeMatMult].active) ? (*PetscLogPLE)((MAT_TransposeMatMult
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9899,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9900 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9901}
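A common use of C=A^T*B is to form a normal-equations style operator by passing the same matrix twice; a minimal sketch, assuming A is assembled and of a supported type:
.vb
 Mat N;
 ierr = MatTransposeMatMult(A,A,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&N);CHKERRQ(ierr); /* N = A^T*A */
 /* ... use N, e.g. as the operator of a linear solve ... */
 ierr = MatDestroy(&N);CHKERRQ(ierr);
.ve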
9902
9903/*@
9904 MatMatMatMult - Performs Matrix-Matrix-Matrix Multiplication D=A*B*C.
9905
9906 Neighbor-wise Collective on Mat
9907
9908 Input Parameters:
9909+ A - the left matrix
9910. B - the middle matrix
9911. C - the right matrix
9912. scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9913-  fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B) + nnz(C)); use PETSC_DEFAULT if you do not have a good estimate.
9914          If the result is a dense matrix this is irrelevant.
9915
9916   Output Parameter:
9917. D - the product matrix
9918
9919 Notes:
9920   Unless scall is MAT_REUSE_MATRIX, D will be created.
9921
9922 MAT_REUSE_MATRIX can only be used if the matrices A, B and C have the same nonzero pattern as in the previous call
9923
9924 To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
9925 actually needed.
9926
9927 If you have many matrices with the same non-zero structure to multiply, you
9928   should use MAT_REUSE_MATRIX in all calls but the first.
9929
9930 Level: intermediate
9931
9932.seealso: MatMatMult(), MatPtAP()
9933@*/
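A minimal usage sketch of the create/reuse pattern described in the Notes above; R, A, and P are illustrative names for assembled matrices with compatible dimensions:
.vb
 Mat D;
 ierr = MatMatMatMult(R,A,P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&D);CHKERRQ(ierr); /* D = R*A*P */
 /* ... update values of R, A, P without changing their nonzero patterns ... */
 ierr = MatMatMatMult(R,A,P,MAT_REUSE_MATRIX,PETSC_DEFAULT,&D);CHKERRQ(ierr);
 ierr = MatDestroy(&D);CHKERRQ(ierr);
.ve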
9934PetscErrorCode MatMatMatMult(Mat A,Mat B,Mat C,MatReuse scall,PetscReal fill,Mat *D)
9935{
9936 PetscErrorCode ierr;
9937 PetscErrorCode (*fA)(Mat,Mat,Mat,MatReuse,PetscReal,Mat*);
9938 PetscErrorCode (*fB)(Mat,Mat,Mat,MatReuse,PetscReal,Mat*);
9939 PetscErrorCode (*fC)(Mat,Mat,Mat,MatReuse,PetscReal,Mat*);
9940 PetscErrorCode (*mult)(Mat,Mat,Mat,MatReuse,PetscReal,Mat*)=NULL((void*)0);
9941
9942   PetscFunctionBegin;
9943   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9944   PetscValidType(A,1);
9945   MatCheckPreallocated(A,1);
9946   if (scall == MAT_INPLACE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported");
9947   if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9948   if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9949   PetscValidHeaderSpecific(B,MAT_CLASSID,2);
9950   PetscValidType(B,2);
9951   MatCheckPreallocated(B,2);
9952   if (!B->assembled) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9953   if (B->factortype) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9954   PetscValidHeaderSpecific(C,MAT_CLASSID,3);
9955   PetscValidPointer(C,3);
9956   MatCheckPreallocated(C,3);
9957   if (!C->assembled) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9958   if (C->factortype) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9959   if (B->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",B->rmap->N,A->cmap->N);
9960   if (C->rmap->N!=B->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",C->rmap->N,B->cmap->N);
9961   if (scall == MAT_REUSE_MATRIX) {
9962     PetscValidPointer(*D,6);
9963     PetscValidHeaderSpecific(*D,MAT_CLASSID,6);
9964     ierr = PetscLogEventBegin(MAT_MatMatMult,A,B,0,0);CHKERRQ(ierr);
9965     ierr = (*(*D)->ops->matmatmult)(A,B,C,scall,fill,D);CHKERRQ(ierr);
9966     ierr = PetscLogEventEnd(MAT_MatMatMult,A,B,0,0);CHKERRQ(ierr);
9967     PetscFunctionReturn(0);
9968   }
9969   if (fill == PETSC_DEFAULT || fill == PETSC_DECIDE) fill = 2.0;
9970   if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill);
9971
9972   fA = A->ops->matmatmult;
9973   fB = B->ops->matmatmult;
9974   fC = C->ops->matmatmult;
9975   if (fA == fB && fA == fC) {
9976     if (!fA) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MatMatMatMult not supported for A of type %s",((PetscObject)A)->type_name);
9977     mult = fA;
9978   } else {
9979     /* dispatch based on the type of A, B and C from their PetscObject's PetscFunctionLists. */
9980     char multname[256];
9981     ierr = PetscStrncpy(multname,"MatMatMatMult_",sizeof(multname));CHKERRQ(ierr);
9982     ierr = PetscStrlcat(multname,((PetscObject)A)->type_name,sizeof(multname));CHKERRQ(ierr);
9983     ierr = PetscStrlcat(multname,"_",sizeof(multname));CHKERRQ(ierr);
9984     ierr = PetscStrlcat(multname,((PetscObject)B)->type_name,sizeof(multname));CHKERRQ(ierr);
9985     ierr = PetscStrlcat(multname,"_",sizeof(multname));CHKERRQ(ierr);
9986     ierr = PetscStrlcat(multname,((PetscObject)C)->type_name,sizeof(multname));CHKERRQ(ierr);
9987     ierr = PetscStrlcat(multname,"_C",sizeof(multname));CHKERRQ(ierr);
9988     ierr = PetscObjectQueryFunction((PetscObject)B,multname,&mult);CHKERRQ(ierr);
9989     if (!mult) SETERRQ3(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MatMatMatMult requires A, %s, to be compatible with B, %s, C, %s",((PetscObject)A)->type_name,((PetscObject)B)->type_name,((PetscObject)C)->type_name);
9990   }
9991   ierr = PetscLogEventBegin(MAT_MatMatMult,A,B,0,0);CHKERRQ(ierr);
9992   ierr = (*mult)(A,B,C,scall,fill,D);CHKERRQ(ierr);
9993   ierr = PetscLogEventEnd(MAT_MatMatMult,A,B,0,0);CHKERRQ(ierr);
9994   PetscFunctionReturn(0);
9995 }
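
 For reference, a minimal usage sketch of MatMatMatMult(), whose dispatch logic appears above. This is an editorial sketch, not part of the annotated listing; it assumes A, B and C are assembled matrices whose global dimensions are conformal, exactly as the checks at source lines 9959-9960 require. Note that MAT_INPLACE_MATRIX is rejected by this routine (source line 9946).

   /* sketch: compute D = A*B*C, then reuse D's storage on a later call */
   Mat            D;
   PetscErrorCode ierr;
   ierr = MatMatMatMult(A,B,C,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&D);CHKERRQ(ierr);
   /* ... change the values (not the nonzero structure) of A, B or C ... */
   ierr = MatMatMatMult(A,B,C,MAT_REUSE_MATRIX,PETSC_DEFAULT,&D);CHKERRQ(ierr);
   ierr = MatDestroy(&D);CHKERRQ(ierr);

 Passing PETSC_DEFAULT for fill selects the default estimate of 2.0 set at source line 9969.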
9996
9997/*@
9998 MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
9999
10000 Collective on Mat
10001
10002 Input Parameters:
10003+ mat - the matrix
10004. nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10004. subcomm - MPI communicator split from the communicator in which mat resides (or MPI_COMM_NULL if nsubcomm is used instead)
10006- reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
10007
10008 Output Parameter:
10009. matredundant - redundant matrix
10010
10011 Notes:
10012   MAT_REUSE_MATRIX can only be used when the nonzero structure of the
10013   original matrix has not changed from the last call to MatCreateRedundantMatrix().
10014
10015 This routine creates the duplicated matrices in subcommunicators; you should NOT create them before
10016 calling it.
10017
10018 Level: advanced
10019
10020
10021.seealso: MatDestroy()
10022@*/
10023PetscErrorCode MatCreateRedundantMatrix(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,MatReuse reuse,Mat *matredundant)
10024{
10025 PetscErrorCode ierr;
10026 MPI_Comm comm;
10027 PetscMPIInt size;
10028 PetscInt mloc_sub,nloc_sub,rstart,rend,M=mat->rmap->N,N=mat->cmap->N,bs=mat->rmap->bs;
10029  Mat_Redundant  *redund=NULL;
10030  PetscSubcomm   psubcomm=NULL;
10031 MPI_Comm subcomm_in=subcomm;
10032 Mat *matseq;
10033 IS isrow,iscol;
10034 PetscBool newsubcomm=PETSC_FALSE;
10035
10036   PetscFunctionBegin;
10037   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10038 if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
10039     PetscValidPointer(*matredundant,5);
10040     PetscValidHeaderSpecific(*matredundant,MAT_CLASSID,5);
10041 }
10042
10043   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
10044 if (size == 1 || nsubcomm == 1) {
10045 if (reuse == MAT_INITIAL_MATRIX) {
10046       ierr = MatDuplicate(mat,MAT_COPY_VALUES,matredundant);CHKERRQ(ierr);
10047 } else {
10048       if (*matredundant == mat) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10049       ierr = MatCopy(mat,*matredundant,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
10050 }
10051     PetscFunctionReturn(0);
10052 }
10053
10054   if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
10055   if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
10056   MatCheckPreallocated(mat,1);
10057
10058   ierr = PetscLogEventBegin(MAT_RedundantMat,mat,0,0,0);CHKERRQ(ierr);
10059   if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) {  /* get subcomm if user does not provide subcomm */
10060 /* create psubcomm, then get subcomm */
10061     ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
10062     ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
10063     if (nsubcomm < 1 || nsubcomm > size) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"nsubcomm must be between 1 and %D",size);
10064
10065     ierr = PetscSubcommCreate(comm,&psubcomm);CHKERRQ(ierr);
10066     ierr = PetscSubcommSetNumber(psubcomm,nsubcomm);CHKERRQ(ierr);
10067     ierr = PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr);
10068     ierr = PetscSubcommSetFromOptions(psubcomm);CHKERRQ(ierr);
10069     ierr = PetscCommDuplicate(PetscSubcommChild(psubcomm),&subcomm,NULL);CHKERRQ(ierr);
10070     newsubcomm = PETSC_TRUE;
10071     ierr = PetscSubcommDestroy(&psubcomm);CHKERRQ(ierr);
10072 }
10073
10074 /* get isrow, iscol and a local sequential matrix matseq[0] */
10075 if (reuse == MAT_INITIAL_MATRIX) {
10076     mloc_sub = PETSC_DECIDE;
10077     nloc_sub = PETSC_DECIDE;
10078 if (bs < 1) {
10079       ierr = PetscSplitOwnership(subcomm,&mloc_sub,&M);CHKERRQ(ierr);
10080       ierr = PetscSplitOwnership(subcomm,&nloc_sub,&N);CHKERRQ(ierr);
10081     } else {
10082       ierr = PetscSplitOwnershipBlock(subcomm,bs,&mloc_sub,&M);CHKERRQ(ierr);
10083       ierr = PetscSplitOwnershipBlock(subcomm,bs,&nloc_sub,&N);CHKERRQ(ierr);
10084 }
10085     ierr = MPI_Scan(&mloc_sub,&rend,1,MPIU_INT,MPI_SUM,subcomm);CHKERRQ(ierr);
10086     rstart = rend - mloc_sub;
10087     ierr = ISCreateStride(PETSC_COMM_SELF,mloc_sub,rstart,1,&isrow);CHKERRQ(ierr);
10088     ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iscol);CHKERRQ(ierr);
10089 } else { /* reuse == MAT_REUSE_MATRIX */
10090     if (*matredundant == mat) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10091     /* retrieve subcomm */
10092     ierr = PetscObjectGetComm((PetscObject)(*matredundant),&subcomm);CHKERRQ(ierr);
10093 redund = (*matredundant)->redundant;
10094 isrow = redund->isrow;
10095 iscol = redund->iscol;
10096 matseq = redund->matseq;
10097 }
10098   ierr = MatCreateSubMatrices(mat,1,&isrow,&iscol,reuse,&matseq);CHKERRQ(ierr);
10099
10100 /* get matredundant over subcomm */
10101 if (reuse == MAT_INITIAL_MATRIX) {
10102     ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,matseq[0],nloc_sub,reuse,matredundant);CHKERRQ(ierr);
10103
10104     /* create a supporting struct and attach it to C for reuse */
10105     ierr = PetscNewLog(*matredundant,&redund);CHKERRQ(ierr);
10106 (*matredundant)->redundant = redund;
10107 redund->isrow = isrow;
10108 redund->iscol = iscol;
10109 redund->matseq = matseq;
10110 if (newsubcomm) {
10111 redund->subcomm = subcomm;
10112 } else {
10113       redund->subcomm = MPI_COMM_NULL;
10114 }
10115 } else {
10116     ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,matseq[0],PETSC_DECIDE,reuse,matredundant);CHKERRQ(ierr);
10117 }
10118   ierr = PetscLogEventEnd(MAT_RedundantMat,mat,0,0,0);CHKERRQ(ierr);
10119   PetscFunctionReturn(0);
10120}
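
 A minimal usage sketch of MatCreateRedundantMatrix() follows; this is an editorial addition, 'mat' is assumed to be an assembled parallel matrix, and the nsubcomm value of 2 is illustrative. Passing MPI_COMM_NULL lets the routine build contiguous subcommunicators itself, via the PetscSubcommCreate() branch at source lines 10059-10072.

   Mat redundant;
   ierr = MatCreateRedundantMatrix(mat,2,MPI_COMM_NULL,MAT_INITIAL_MATRIX,&redundant);CHKERRQ(ierr);
   /* ... e.g. factor and solve with 'redundant' independently on each subcommunicator ... */
   /* after the numerical values (not the nonzero structure) of 'mat' change: */
   ierr = MatCreateRedundantMatrix(mat,2,MPI_COMM_NULL,MAT_REUSE_MATRIX,&redundant);CHKERRQ(ierr);
   ierr = MatDestroy(&redundant);CHKERRQ(ierr);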
10121
10122/*@C
10123 MatGetMultiProcBlock - Create multiple [bjacobi] 'parallel submatrices' from
10124 a given 'mat' object. Each submatrix can span multiple procs.
10125
10126 Collective on Mat
10127
10128 Input Parameters:
10129+ mat - the matrix
10130. subcomm - the subcommunicator obtained by MPI_Comm_split(comm)
10131- scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
10132
10133 Output Parameter:
10134. subMat - parallel submatrices, each spanning a given subcomm
10135
10136 Notes:
10137    The submatrix partition across processors is dictated by 'subComm', a
10138    communicator obtained by MPI_Comm_split(comm). The split
10139    is not restricted to grouping consecutive original ranks.
10140
10141    Due to the MPI_Comm_split() usage, the parallel layout of the submatrices
10142    maps directly to the layout of the original matrix [wrt the local
10143    row,col partitioning]. So the original 'DiagonalMat' naturally maps
10144    into the 'DiagonalMat' of the subMat, hence it is used directly from
10145    the subMat. However the offDiagMat loses some columns - and these are
10146    reconstructed with MatSetValues()
10147
10148 Level: advanced
10149
10150
10151.seealso: MatCreateSubMatrices()
10152@*/
10153PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall,Mat *subMat)
10154{
10155 PetscErrorCode ierr;
10156 PetscMPIInt commsize,subCommSize;
10157
10158   PetscFunctionBegin;
10159   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&commsize);CHKERRQ(ierr);
10160   ierr = MPI_Comm_size(subComm,&subCommSize);CHKERRQ(ierr);
10161   if (subCommSize > commsize) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"CommSize %D < SubCommSize %D",commsize,subCommSize);
10162
10163   if (scall == MAT_REUSE_MATRIX && *subMat == mat) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10164   ierr = PetscLogEventBegin(MAT_GetMultiProcBlock,mat,0,0,0);CHKERRQ(ierr);
10165   ierr = (*mat->ops->getmultiprocblock)(mat,subComm,scall,subMat);CHKERRQ(ierr);
10166   ierr = PetscLogEventEnd(MAT_GetMultiProcBlock,mat,0,0,0);CHKERRQ(ierr);
10167   PetscFunctionReturn(0);
10168}
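
 A usage sketch for MatGetMultiProcBlock(); this is an editorial addition, the halving color rule is illustrative, and 'mat' is assumed to live on PETSC_COMM_WORLD.

   MPI_Comm    subComm;
   PetscMPIInt rank,size;
   Mat         subMat;
   ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
   ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
   /* split the ranks into two halves; each half gets its own parallel submatrix */
   ierr = MPI_Comm_split(PETSC_COMM_WORLD,(rank < size/2) ? 0 : 1,rank,&subComm);CHKERRQ(ierr);
   ierr = MatGetMultiProcBlock(mat,subComm,MAT_INITIAL_MATRIX,&subMat);CHKERRQ(ierr);
   /* ... use subMat collectively on subComm ... */
   ierr = MatDestroy(&subMat);CHKERRQ(ierr);
   ierr = MPI_Comm_free(&subComm);CHKERRQ(ierr);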
10169
10170/*@
10171 MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10172
10173 Not Collective
10174
10175 Input Arguments:
10176 mat - matrix to extract local submatrix from
10177 isrow - local row indices for submatrix
10178 iscol - local column indices for submatrix
10179
10180 Output Arguments:
10181 submat - the submatrix
10182
10183 Level: intermediate
10184
10185 Notes:
10186 The submat should be returned with MatRestoreLocalSubMatrix().
10187
10188 Depending on the format of mat, the returned submat may not implement MatMult(). Its communicator may be
10189 the same as mat, it may be PETSC_COMM_SELF, or some other subcomm of mat's.
10190
10191 The submat always implements MatSetValuesLocal(). If isrow and iscol have the same block size, then
10192 MatSetValuesBlockedLocal() will also be implemented.
10193
10194    The mat must have had an ISLocalToGlobalMapping provided to it with MatSetLocalToGlobalMapping(). Note that
10195 matrices obtained with DMCreateMatrix() generally already have the local to global mapping provided.
10196
10197.seealso: MatRestoreLocalSubMatrix(), MatCreateLocalRef(), MatSetLocalToGlobalMapping()
10198@*/
10199PetscErrorCode MatGetLocalSubMatrix(Mat mat,IS isrow,IS iscol,Mat *submat)
10200{
10201 PetscErrorCode ierr;
10202
10203   PetscFunctionBegin;
10204   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10205   PetscValidHeaderSpecific(isrow,IS_CLASSID,2);
10206   PetscValidHeaderSpecific(iscol,IS_CLASSID,3);
10207   PetscCheckSameComm(isrow,2,iscol,3);
10208   PetscValidPointer(submat,4);
10209   if (!mat->rmap->mapping) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Matrix must have local to global mapping provided before this call");
10210
10211 if (mat->ops->getlocalsubmatrix) {
10212     ierr = (*mat->ops->getlocalsubmatrix)(mat,isrow,iscol,submat);CHKERRQ(ierr);
10213   } else {
10214     ierr = MatCreateLocalRef(mat,isrow,iscol,submat);CHKERRQ(ierr);
10215   }
10216   PetscFunctionReturn(0);
10217}
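
 A sketch pairing MatGetLocalSubMatrix() with MatRestoreLocalSubMatrix() (defined next). This is an editorial addition; it assumes 'mat' already has a local-to-global mapping, e.g. because it came from DMCreateMatrix(), and that isrow/iscol are ISes of local indices. The inserted value is illustrative.

   Mat         submat;
   PetscInt    row = 0,col = 0;   /* indices local to the submatrix */
   PetscScalar v   = 1.0;
   ierr = MatGetLocalSubMatrix(mat,isrow,iscol,&submat);CHKERRQ(ierr);
   ierr = MatSetValuesLocal(submat,1,&row,1,&col,&v,ADD_VALUES);CHKERRQ(ierr); /* always supported on submat */
   ierr = MatRestoreLocalSubMatrix(mat,isrow,iscol,&submat);CHKERRQ(ierr);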
10218
10219/*@
10220 MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering
10221
10222 Not Collective
10223
10224 Input Arguments:
10225 mat - matrix to extract local submatrix from
10226 isrow - local row indices for submatrix
10227 iscol - local column indices for submatrix
10228 submat - the submatrix
10229
10230 Level: intermediate
10231
10232.seealso: MatGetLocalSubMatrix()
10233@*/
10234PetscErrorCode MatRestoreLocalSubMatrix(Mat mat,IS isrow,IS iscol,Mat *submat)
10235{
10236 PetscErrorCode ierr;
10237
10238   PetscFunctionBegin;
10239   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10240   PetscValidHeaderSpecific(isrow,IS_CLASSID,2);
10241   PetscValidHeaderSpecific(iscol,IS_CLASSID,3);
10242   PetscCheckSameComm(isrow,2,iscol,3);
10243   PetscValidPointer(submat,4);
10244 if (*submat) {
10245     PetscValidHeaderSpecific(*submat,MAT_CLASSID,4);
10246 }
10247
10248 if (mat->ops->restorelocalsubmatrix) {
10249     ierr = (*mat->ops->restorelocalsubmatrix)(mat,isrow,iscol,submat);CHKERRQ(ierr);
10250   } else {
10251     ierr = MatDestroy(submat);CHKERRQ(ierr);
10252   }
10253   *submat = NULL;
10254   PetscFunctionReturn(0);
10255}
10256
10257/* --------------------------------------------------------*/
10258/*@
10259   MatFindZeroDiagonals - Finds all the rows of a matrix whose diagonal entry is zero or absent
10260
10261 Collective on Mat
10262
10263 Input Parameter:
10264. mat - the matrix
10265
10266 Output Parameter:
10267. is - if any rows have zero diagonals this contains the list of them
10268
10269 Level: developer
10270
10271.seealso: MatMultTranspose(), MatMultAdd(), MatMultTransposeAdd()
10272@*/
10273PetscErrorCode MatFindZeroDiagonals(Mat mat,IS *is)
10274{
10275 PetscErrorCode ierr;
10276
10277   PetscFunctionBegin;
10278   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10279   PetscValidType(mat,1);
10280   if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
10281   if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
10282
10283 if (!mat->ops->findzerodiagonals) {
10284 Vec diag;
10285 const PetscScalar *a;
10286 PetscInt *rows;
10287 PetscInt rStart, rEnd, r, nrow = 0;
10288
10289     ierr = MatCreateVecs(mat, &diag, NULL);CHKERRQ(ierr);
10290     ierr = MatGetDiagonal(mat, diag);CHKERRQ(ierr);
10291     ierr = MatGetOwnershipRange(mat, &rStart, &rEnd);CHKERRQ(ierr);
10292     ierr = VecGetArrayRead(diag, &a);CHKERRQ(ierr);
10293 for (r = 0; r < rEnd-rStart; ++r) if (a[r] == 0.0) ++nrow;
10294     ierr = PetscMalloc1(nrow, &rows);CHKERRQ(ierr);
10295 nrow = 0;
10296 for (r = 0; r < rEnd-rStart; ++r) if (a[r] == 0.0) rows[nrow++] = r+rStart;
10297     ierr = VecRestoreArrayRead(diag, &a);CHKERRQ(ierr);
10298     ierr = VecDestroy(&diag);CHKERRQ(ierr);
10299     ierr = ISCreateGeneral(PetscObjectComm((PetscObject) mat), nrow, rows, PETSC_OWN_POINTER, is);CHKERRQ(ierr);
10300 } else {
10301     ierr = (*mat->ops->findzerodiagonals)(mat, is);CHKERRQ(ierr);
10302 }
10303   PetscFunctionReturn(0);
10304}
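
 A sketch of calling MatFindZeroDiagonals() on an assembled matrix and reporting how many rows were found; an editorial addition, with the printing purely illustrative.

   IS       zerodiag;
   PetscInt nzero;
   ierr = MatFindZeroDiagonals(mat,&zerodiag);CHKERRQ(ierr);
   ierr = ISGetSize(zerodiag,&nzero);CHKERRQ(ierr);
   ierr = PetscPrintf(PetscObjectComm((PetscObject)mat),"found %D rows with zero diagonal\n",nzero);CHKERRQ(ierr);
   ierr = ISDestroy(&zerodiag);CHKERRQ(ierr);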
10305
10306/*@
10307 MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10308
10309 Collective on Mat
10310
10311 Input Parameter:
10312. mat - the matrix
10313
10314 Output Parameter:
10315. is - contains the list of rows with off block diagonal entries
10316
10317 Level: developer
10318
10319.seealso: MatMultTranspose(), MatMultAdd(), MatMultTransposeAdd()
10320@*/
10321PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat,IS *is)
10322{
10323 PetscErrorCode ierr;
10324
10325   PetscFunctionBegin;
10326   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10327   PetscValidType(mat,1);
10328   if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
10329   if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
10330
10331 if (!mat->ops->findoffblockdiagonalentries) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"This matrix type does not have a find off block diagonal entries defined")return PetscError(PetscObjectComm((PetscObject)mat),10331,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"This matrix type does not have a find off block diagonal entries defined"
)
;
10332 ierr = (*mat->ops->findoffblockdiagonalentries)(mat,is);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10332,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10333 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
10334}
10335
10336/*@C
10337 MatInvertBlockDiagonal - Inverts the block diagonal entries.
10338
10339 Collective on Mat
10340
10341 Input Parameter:
10342. mat - the matrix
10343
10344 Output Parameter:
10345. values - the block inverses in column major order (FORTRAN-like)
10346
10347 Note:
10348 This routine is not available from Fortran.
10349
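 Example of Usage (a minimal sketch; A is a placeholder name for an assembled matrix, e.g. MATBAIJ, whose type supports this operation):
.vb
   const PetscScalar *vals;
   MatInvertBlockDiagonal(A,&vals);
   /* vals holds the bs*bs block inverses, column major; the array is owned by the matrix */
.ve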
10350 Level: advanced
10351
10352.seealso: MatInvertBlockDiagonalMat()
10353@*/
10354PetscErrorCode MatInvertBlockDiagonal(Mat mat,const PetscScalar **values)
10355{
10356 PetscErrorCode ierr;
10357
10358 PetscFunctionBegin;
10359 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10360 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
10361 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
10362 if (!mat->ops->invertblockdiagonal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported");
10363 ierr = (*mat->ops->invertblockdiagonal)(mat,values);CHKERRQ(ierr);
10364 PetscFunctionReturn(0);
10365}
10366
10367/*@C
10368 MatInvertVariableBlockDiagonal - Inverts the block diagonal entries for blocks of variable, user-supplied sizes.
10369
10370 Collective on Mat
10371
10372 Input Parameters:
10373+ mat - the matrix
10374. nblocks - the number of blocks
10375- bsizes - the size of each block
10376
10377 Output Parameter:
10378. values - the block inverses in column major order (FORTRAN-like)
10379
10380 Note:
10381 This routine is not available from Fortran.
10382
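 Example of Usage (a minimal sketch; assumes this rank owns two diagonal blocks, of sizes 2 and 3, so values needs 2*2 + 3*3 = 13 entries):
.vb
   const PetscInt bsizes[2] = {2,3};
   PetscScalar    values[13];
   MatInvertVariableBlockDiagonal(A,2,bsizes,values);
.ve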
10383 Level: advanced
10384
10385.seealso: MatInvertBlockDiagonal()
10386@*/
10387PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat,PetscInt nblocks,const PetscInt *bsizes,PetscScalar *values)
10388{
10389 PetscErrorCode ierr;
10390
10391 PetscFunctionBegin;
10392 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10393 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
10394 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
10395 if (!mat->ops->invertvariableblockdiagonal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported");
10396 ierr = (*mat->ops->invertvariableblockdiagonal)(mat,nblocks,bsizes,values);CHKERRQ(ierr);
10397 PetscFunctionReturn(0);
10398}
10399
10400/*@
10401 MatInvertBlockDiagonalMat - Sets matrix C to be the inverted block diagonal of matrix A
10402
10403 Collective on Mat
10404
10405 Input Parameter:
10406. A - the matrix
10407
10408 Output Parameter:
10409. C - matrix with inverted block diagonal of A. This matrix should be created and may have its type set.
10410
10411 Notes: The block size of A is used to determine the blocks on the diagonal of C.
10412
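 Example of Usage (a minimal sketch; A is assumed assembled with its block size set):
.vb
   Mat C;
   MatCreate(PetscObjectComm((PetscObject)A),&C);
   MatSetType(C,MATAIJ);
   MatInvertBlockDiagonalMat(A,C);
.ve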
10413 Level: advanced
10414
10415.seealso: MatInvertBlockDiagonal()
10416@*/
10417PetscErrorCode MatInvertBlockDiagonalMat(Mat A,Mat C)
10418{
10419 PetscErrorCode ierr;
10420 const PetscScalar *vals;
10421 PetscInt *dnnz;
10422 PetscInt M,N,m,n,rstart,rend,bs,i,j;
10423
10424 PetscFunctionBegin;
10425 ierr = MatInvertBlockDiagonal(A,&vals);CHKERRQ(ierr);
10426 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
10427 ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
10428 ierr = MatGetLocalSize(A,&m,&n);CHKERRQ(ierr);
10429 ierr = MatSetSizes(C,m,n,M,N);CHKERRQ(ierr);
10430 ierr = MatSetBlockSize(C,bs);CHKERRQ(ierr);
10431 ierr = PetscMalloc1(m/bs,&dnnz);CHKERRQ(ierr);
10432 for (j = 0; j < m/bs; j++) dnnz[j] = 1;
10433 ierr = MatXAIJSetPreallocation(C,bs,dnnz,NULL,NULL,NULL);CHKERRQ(ierr);
10434 ierr = PetscFree(dnnz);CHKERRQ(ierr);
10435 ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
10436 ierr = MatSetOption(C,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
10437 for (i = rstart/bs; i < rend/bs; i++) {
10438 ierr = MatSetValuesBlocked(C,1,&i,1,&i,&vals[(i-rstart/bs)*bs*bs],INSERT_VALUES);CHKERRQ(ierr);
10439 }
10440 ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
10441 ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
10442 ierr = MatSetOption(C,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
10443 PetscFunctionReturn(0);
10444}
10445
10446/*@C
10447 MatTransposeColoringDestroy - Destroys a coloring context for matrix product C=A*B^T that was created
10448 via MatTransposeColoringCreate().
10449
10450 Collective on MatTransposeColoring
10451
10452 Input Parameter:
10453. c - coloring context
10454
10455 Level: intermediate
10456
10457.seealso: MatTransposeColoringCreate()
10458@*/
10459PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10460{
10461 PetscErrorCode ierr;
10462 MatTransposeColoring matcolor=*c;
10463
10464 PetscFunctionBegin;
10465 if (!matcolor) PetscFunctionReturn(0);
10466 if (--((PetscObject)matcolor)->refct > 0) {matcolor = 0; PetscFunctionReturn(0);}
10467
10468 ierr = PetscFree3(matcolor->ncolumns,matcolor->nrows,matcolor->colorforrow);CHKERRQ(ierr);
10469 ierr = PetscFree(matcolor->rows);CHKERRQ(ierr);
10470 ierr = PetscFree(matcolor->den2sp);CHKERRQ(ierr);
10471 ierr = PetscFree(matcolor->colorforcol);CHKERRQ(ierr);
10472 ierr = PetscFree(matcolor->columns);CHKERRQ(ierr);
10473 if (matcolor->brows>0) {
10474 ierr = PetscFree(matcolor->lstart);CHKERRQ(ierr);
10475 }
10476 ierr = PetscHeaderDestroy(c);CHKERRQ(ierr);
10477 PetscFunctionReturn(0);
10478}
10479
10480/*@C
10481 MatTransColoringApplySpToDen - Given a symbolic matrix product C=A*B^T for which
10482 a MatTransposeColoring context has been created, computes the dense matrix B^T by
10483 applying the MatTransposeColoring to the sparse matrix B.
10484
10485 Collective on MatTransposeColoring
10486
10487 Input Parameters:
10488+ coloring - coloring context created with MatTransposeColoringCreate()
10489. B - sparse matrix B
10490- Btdense - symbolic dense matrix B^T
10491
10492 Output Parameter:
10493. Btdense - dense matrix B^T
10494
10495 Level: advanced
10496
10497 Notes:
10498 This routine is used internally by some implementations of MatRARt().
10499
10500.seealso: MatTransposeColoringCreate(), MatTransposeColoringDestroy(), MatTransColoringApplyDenToSp()
10501
10502@*/
10503PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring,Mat B,Mat Btdense)
10504{
10505 PetscErrorCode ierr;
10506
10507 PetscFunctionBegin;
10508 PetscValidHeaderSpecific(coloring,MAT_TRANSPOSECOLORING_CLASSID,1);
10509 PetscValidHeaderSpecific(B,MAT_CLASSID,2);
10510 PetscValidHeaderSpecific(Btdense,MAT_CLASSID,3);
10511
10512 if (!B->ops->transcoloringapplysptoden) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported for this matrix type %s",((PetscObject)B)->type_name);
10513 ierr = (B->ops->transcoloringapplysptoden)(coloring,B,Btdense);CHKERRQ(ierr);
10514 PetscFunctionReturn(0);
10515}
10516
10517/*@C
10518 MatTransColoringApplyDenToSp - Given a symbolic matrix product Csp=A*B^T for which
10519 a MatTransposeColoring context has been created and a dense matrix Cden=A*Btdense
10520 in which Btdense is obtained from MatTransColoringApplySpToDen(), recovers the
10521 sparse matrix Csp from Cden.
10522
10523 Collective on MatTransposeColoring
10524
10525 Input Parameters:
10526+ coloring - coloring context created with MatTransposeColoringCreate()
10527- Cden - matrix product of a sparse matrix and a dense matrix Btdense
10528
10529 Output Parameter:
10530. Csp - sparse matrix
10531
10532 Level: advanced
10533
10534 Notes:
10535 This routine is used internally by some implementations of MatRARt().
10536
10537.seealso: MatTransposeColoringCreate(), MatTransposeColoringDestroy(), MatTransColoringApplySpToDen()
10538
10539@*/
10540PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring,Mat Cden,Mat Csp)
10541{
10542 PetscErrorCode ierr;
10543
10544 PetscFunctionBegin;
10545 PetscValidHeaderSpecific(matcoloring,MAT_TRANSPOSECOLORING_CLASSID,1);
10546 PetscValidHeaderSpecific(Cden,MAT_CLASSID,2);
10547 PetscValidHeaderSpecific(Csp,MAT_CLASSID,3);
10548
10549 if (!Csp->ops->transcoloringapplydentosp) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported for this matrix type %s",((PetscObject)Csp)->type_name);
10550 ierr = (Csp->ops->transcoloringapplydentosp)(matcoloring,Cden,Csp);CHKERRQ(ierr);
10551 PetscFunctionReturn(0);
10552}
10553
10554/*@C
10555 MatTransposeColoringCreate - Creates a matrix coloring context for matrix product C=A*B^T.
10556
10557 Collective on Mat
10558
10559 Input Parameters:
10560+ mat - the matrix product C
10561- iscoloring - the coloring of the matrix; usually obtained with MatColoringCreate() or DMCreateColoring()
10562
10563 Output Parameter:
10564. color - the new coloring context
10565
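 Example of Usage (a high-level sketch of the internal MatRARt()-style pattern; C, B, Btdense, Cden, Csp are placeholder matrices assumed to have compatible sizes):
.vb
   MatColoring          mc;
   ISColoring           iscoloring;
   MatTransposeColoring tc;
   MatColoringCreate(C,&mc);
   MatColoringApply(mc,&iscoloring);
   MatColoringDestroy(&mc);
   MatTransposeColoringCreate(C,iscoloring,&tc);
   MatTransColoringApplySpToDen(tc,B,Btdense);   /* dense B^T from sparse B */
   /* ... compute Cden = A*Btdense ... */
   MatTransColoringApplyDenToSp(tc,Cden,Csp);    /* recover sparse C */
   MatTransposeColoringDestroy(&tc);
   ISColoringDestroy(&iscoloring);
.ve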
10566 Level: intermediate
10567
10568.seealso: MatTransposeColoringDestroy(), MatTransColoringApplySpToDen(),
10569 MatTransColoringApplyDenToSp()
10570@*/
10571PetscErrorCode MatTransposeColoringCreate(Mat mat,ISColoring iscoloring,MatTransposeColoring *color)
10572{
10573 MatTransposeColoring c;
10574 MPI_Comm comm;
10575 PetscErrorCode ierr;
10576
10577 PetscFunctionBegin;
10578 ierr = PetscLogEventBegin(MAT_TransposeColoringCreate,mat,0,0,0);CHKERRQ(ierr);
10579 ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
10580 ierr = PetscHeaderCreate(c,MAT_TRANSPOSECOLORING_CLASSID,"MatTransposeColoring","Matrix product C=A*B^T via coloring","Mat",comm,MatTransposeColoringDestroy,NULL);CHKERRQ(ierr);
10581
10582 c->ctype = iscoloring->ctype;
10583 if (mat->ops->transposecoloringcreate) {
10584 ierr = (*mat->ops->transposecoloringcreate)(mat,iscoloring,c);CHKERRQ(ierr);
10585 } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Code not yet written for this matrix type");
10586
10587 *color = c;
10588 ierr = PetscLogEventEnd(MAT_TransposeColoringCreate,mat,0,0,0);CHKERRQ(ierr);
10589 PetscFunctionReturn(0);
10590}
10591
10592/*@
10593 MatGetNonzeroState - Returns a 64-bit integer representing the current state of the nonzero structure of the matrix. If no
10594 new nonzero locations have been added to the matrix since the previous call, then the value will be the
10595 same; otherwise it will be larger.
10596
10597 Not Collective
10598
10599 Input Parameter:
10600. A - the matrix
10601
10602 Output Parameter:
10603. state - the current state
10604
10605 Notes:
10606 You can only compare states from two different calls on the SAME matrix; you cannot compare states from
10607 different matrices.
10608
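 Example of Usage (a minimal sketch; A is a placeholder name for an assembled matrix):
.vb
   PetscObjectState s1,s2;
   MatGetNonzeroState(A,&s1);
   /* ... further assembly on A ... */
   MatGetNonzeroState(A,&s2);
   if (s2 > s1) { /* new nonzero locations were introduced */ }
.ve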
10609 Level: intermediate
10610
10611@*/
10612PetscErrorCode MatGetNonzeroState(Mat mat,PetscObjectState *state)
10613{
10614 PetscFunctionBegin;
10615 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10616 *state = mat->nonzerostate;
10617 PetscFunctionReturn(0);
10618}
10619
10620/*@
10621 MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
10622 matrices from each processor
10623
10624 Collective
10625
10626 Input Parameters:
10627+ comm - the communicator the parallel matrix will live on
10628. seqmat - the input sequential matrix, one per process
10629. n - number of local columns (or PETSC_DECIDE)
10630- reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
10631
10632 Output Parameter:
10633. mpimat - the parallel matrix generated
10634
10635 Level: advanced
10636
10637 Notes:
10638 The number of columns of the matrix MUST be the same on each process.
10639
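 Example of Usage (a minimal sketch; seqmat is assumed to be an assembled MATSEQAIJ matrix with the same number of columns on every rank):
.vb
   Mat mpimat;
   MatCreateMPIMatConcatenateSeqMat(PETSC_COMM_WORLD,seqmat,PETSC_DECIDE,MAT_INITIAL_MATRIX,&mpimat);
.ve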
10640@*/
10641PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm,Mat seqmat,PetscInt n,MatReuse reuse,Mat *mpimat)
10642{
10643 PetscErrorCode ierr;
10644
10645 PetscFunctionBegin;
10646 if (!seqmat->ops->creatempimatconcatenateseqmat) SETERRQ1(PetscObjectComm((PetscObject)seqmat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)seqmat)->type_name);
10647 if (reuse == MAT_REUSE_MATRIX && seqmat == *mpimat) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10648
10649 ierr = PetscLogEventBegin(MAT_Merge,seqmat,0,0,0);CHKERRQ(ierr);
10650 ierr = (*seqmat->ops->creatempimatconcatenateseqmat)(comm,seqmat,n,reuse,mpimat);CHKERRQ(ierr);
10651 ierr = PetscLogEventEnd(MAT_Merge,seqmat,0,0,0);CHKERRQ(ierr);
10652 PetscFunctionReturn(0);
10653}
10654
10655/*@
10656 MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent
10657 ranks' ownership ranges.
10658
10659 Collective on A
10660
10661 Input Parameters:
10662+ A - the matrix to create subdomains from
10663- N - requested number of subdomains
10664
10666 Output Parameters:
10667+ n - number of subdomains resulting on this rank
10668- iss - IS list with indices of subdomains on this rank
10669
10670 Level: advanced
10671
10672 Notes:
10673 The number of subdomains must be smaller than the communicator size.
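 Example of Usage (a minimal sketch; assumes A lives on a communicator with more than two ranks):
.vb
   PetscInt n;
   IS       *iss;
   MatSubdomainsCreateCoalesce(A,2,&n,&iss);
   /* use the subdomains, e.g. with PCGASMSetSubdomains(), then destroy iss[0..n-1] and free iss */
.ve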
10674@*/
10675PetscErrorCode MatSubdomainsCreateCoalesce(Mat A,PetscInt N,PetscInt *n,IS *iss[])
10676{
10677 MPI_Comm comm,subcomm;
10678 PetscMPIInt size,rank,color;
10679 PetscInt rstart,rend,k;
10680 PetscErrorCode ierr;
10681
10682 PetscFunctionBegin;
10683 ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
10684 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
10685 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
10686 if (N < 1 || N >= (PetscInt)size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subdomains must be > 0 and < %D, got N = %D",size,N);
10687 *n = 1;
10688 k = ((PetscInt)size)/N + ((PetscInt)size%N>0); /* There are up to k ranks to a color */
10689 color = rank/k;
10690 ierr = MPI_Comm_split(comm,color,rank,&subcomm);CHKERRQ(ierr);
10691 ierr = PetscMalloc1(1,iss);CHKERRQ(ierr);
10692 ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
10693 ierr = ISCreateStride(subcomm,rend-rstart,rstart,1,iss[0]);CHKERRQ(ierr);
10694 ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
10695 PetscFunctionReturn(0);
10696}
10697
10698/*@
10699 MatGalerkin - Constructs the coarse grid problem via Galerkin projection.
10700
10701 If the interpolation and restriction operators are the same, this uses MatPtAP();
10702 if they differ, it uses MatMatMatMult().
10703
10704 Once the coarse grid problem is constructed, the result is corrected for interpolation
10705 operators that are not of full rank, which can legitimately happen in the case of
10706 non-nested geometric multigrid.
10707
10708 Input Parameters:
10709+ restrct - restriction operator
10710. dA - fine grid matrix
10711. interpolate - interpolation operator
10712. reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
10713- fill - expected fill, use PETSC_DEFAULT if you do not have a good estimate
10714
10715 Output Parameter:
10716. A - the Galerkin coarse matrix
10717
10718 Options Database Key:
10719. -pc_mg_galerkin <both,pmat,mat,none>
10720
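 Example of Usage (a minimal sketch; R, P and Afine are placeholder restriction, interpolation and fine-grid matrices assumed compatible):
.vb
   Mat Acoarse;
   MatGalerkin(R,Afine,P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&Acoarse);
.ve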
10721 Level: developer
10722
10723.seealso: MatPtAP(), MatMatMatMult()
10724@*/
10725PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
10726{
10727 PetscErrorCode ierr;
10728 IS zerorows;
10729 Vec diag;
10730
10731 PetscFunctionBegin;
10732 if (reuse == MAT_INPLACE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)dA),PETSC_ERR_SUP,"Inplace product not supported");
10733 /* Construct the coarse grid matrix */
10734 if (interpolate == restrct) {
10735 ierr = MatPtAP(dA,interpolate,reuse,fill,A);CHKERRQ(ierr);
10736 } else {
10737 ierr = MatMatMatMult(restrct,dA,interpolate,reuse,fill,A);CHKERRQ(ierr);
10738 }
10739
10740 /* If the interpolation matrix is not of full rank, A will have zero rows.
10741 This can legitimately happen in the case of non-nested geometric multigrid.
10742 In that event, we set the rows of the matrix to the rows of the identity,
10743 ignoring the equations (as the RHS will also be zero). */
10744
10745 ierr = MatFindZeroRows(*A, &zerorows);CHKERRQ(ierr);
10746
10747 if (zerorows != NULL) { /* if there are any zero rows */
10748 ierr = MatCreateVecs(*A, &diag, NULL);CHKERRQ(ierr);
10749 ierr = MatGetDiagonal(*A, diag);CHKERRQ(ierr);
10750 ierr = VecISSet(diag, zerorows, 1.0);CHKERRQ(ierr);
10751 ierr = MatDiagonalSet(*A, diag, INSERT_VALUES);CHKERRQ(ierr);
10752 ierr = VecDestroy(&diag);CHKERRQ(ierr);
10753 ierr = ISDestroy(&zerorows);CHKERRQ(ierr);
10754 }
10755 PetscFunctionReturn(0);
10756}
10757
10758/*@C
10759 MatSetOperation - Allows the user to set a matrix operation for any matrix type
10760
10761 Logically Collective on Mat
10762
10763 Input Parameters:
10764+ mat - the matrix
10765. op - the name of the operation
10766- f - the function that provides the operation
10767
10768 Level: developer
10769
10770 Usage:
10771$ extern PetscErrorCode usermult(Mat,Vec,Vec);
10772$ ierr = MatCreateXXX(comm,...&A);
10773$ ierr = MatSetOperation(A,MATOP_MULT,(void(*)(void))usermult);
10774
10775 Notes:
10776 See the file include/petscmat.h for a complete list of matrix
10777 operations, which all have the form MATOP_<OPERATION>, where
10778 <OPERATION> is the name (in all capital letters) of the
10779 user interface routine (e.g., MatMult() -> MATOP_MULT).
10780
10781 All user-provided functions (except for MATOP_DESTROY) should have the same calling
10782 sequence as the usual matrix interface routines, since they
10783 are intended to be accessed through those same interface
10784 routines, e.g.,
10785$ MatMult(Mat,Vec,Vec) -> usermult(Mat,Vec,Vec)
10786
10787 In particular, each function MUST return an error code of 0 on success and
10788 nonzero on failure.
10789
10790 This routine is distinct from MatShellSetOperation() in that it can be called on any matrix type.
10791
10792.seealso: MatGetOperation(), MatCreateShell(), MatShellSetContext(), MatShellSetOperation()
10793@*/
10794PetscErrorCode MatSetOperation(Mat mat,MatOperation op,void (*f)(void))
10795{
10796 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 10796; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
10797 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),10797
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),10797,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),10797,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),10797,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
10798 if (op == MATOP_VIEW && !mat->ops->viewnative && f != (void (*)(void))(mat->ops->view)) {
10799 mat->ops->viewnative = mat->ops->view;
10800 }
10801 (((void(**)(void))mat->ops)[op]) = f;
10802  PetscFunctionReturn(0);
10803}
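
   A minimal sketch (not part of matrix.c; MyMult and the driver are hypothetical) of how MatSetOperation() can override the MATOP_MULT slot on an ordinary AIJ matrix. Once installed, MatMult() dispatches to the user kernel, so the kernel must not call MatMult() on the same matrix or it would recurse:

      #include <petscmat.h>

      /* Hypothetical replacement kernel: pretends A acts as 2*I, ignoring the
         stored entries; purely for illustration */
      static PetscErrorCode MyMult(Mat A,Vec x,Vec y)
      {
        PetscErrorCode ierr;

        PetscFunctionBegin;
        ierr = VecCopy(x,y);CHKERRQ(ierr);
        ierr = VecScale(y,2.0);CHKERRQ(ierr);
        PetscFunctionReturn(0);
      }

      int main(int argc,char **argv)
      {
        Mat            A;
        Vec            x,y;
        PetscErrorCode ierr;

        ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
        ierr = MatCreateSeqAIJ(PETSC_COMM_SELF,4,4,1,NULL,&A);CHKERRQ(ierr);
        ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        /* Override the multiply slot; A keeps its AIJ type and storage */
        ierr = MatSetOperation(A,MATOP_MULT,(void(*)(void))MyMult);CHKERRQ(ierr);
        ierr = MatCreateVecs(A,&x,&y);CHKERRQ(ierr);
        ierr = VecSet(x,1.0);CHKERRQ(ierr);
        ierr = MatMult(A,x,y);CHKERRQ(ierr);   /* dispatches to MyMult */
        ierr = VecDestroy(&x);CHKERRQ(ierr);
        ierr = VecDestroy(&y);CHKERRQ(ierr);
        ierr = MatDestroy(&A);CHKERRQ(ierr);
        ierr = PetscFinalize();
        return ierr;
      }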
10804
10805/*@C
10806 MatGetOperation - Gets a matrix operation for any matrix type.
10807
10808 Not Collective
10809
10810 Input Parameters:
10811+ mat - the matrix
10812- op - the name of the operation
10813
10814 Output Parameter:
10815. f - the function that provides the operation
10816
10817 Level: developer
10818
10819 Usage:
10820$ PetscErrorCode (*usermult)(Mat,Vec,Vec);
10821$ ierr = MatGetOperation(A,MATOP_MULT,(void(**)(void))&usermult);
10822
10823 Notes:
10824 See the file include/petscmat.h for a complete list of matrix
10825 operations, which all have the form MATOP_<OPERATION>, where
10826 <OPERATION> is the name (in all capital letters) of the
10827 user interface routine (e.g., MatMult() -> MATOP_MULT).
10828
10829 This routine is distinct from MatShellGetOperation() in that it can be called on any matrix type.
10830
10831.seealso: MatSetOperation(), MatCreateShell(), MatShellGetContext(), MatShellGetOperation()
10832@*/
10833PetscErrorCode MatGetOperation(Mat mat,MatOperation op,void(**f)(void))
10834{
10835  PetscFunctionBegin;
10836  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10837 *f = (((void (**)(void))mat->ops)[op]);
10838  PetscFunctionReturn(0);
10839}
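
   One common use is pairing MatGetOperation() with MatSetOperation() to interpose on an existing implementation. A sketch (the names nativemult, LoggedMult, and WrapMult are hypothetical; the file-scope pointer keeps it simple, so it supports wrapping only one matrix at a time):

      #include <petscmat.h>

      static PetscErrorCode (*nativemult)(Mat,Vec,Vec);  /* saved built-in kernel */

      /* Wrapper with the same calling sequence as MatMult(): report, then delegate */
      static PetscErrorCode LoggedMult(Mat A,Vec x,Vec y)
      {
        PetscErrorCode ierr;

        PetscFunctionBegin;
        ierr = PetscPrintf(PetscObjectComm((PetscObject)A),"MatMult intercepted\n");CHKERRQ(ierr);
        ierr = (*nativemult)(A,x,y);CHKERRQ(ierr);   /* real multiply */
        PetscFunctionReturn(0);
      }

      /* Save the current MATOP_MULT implementation and install the wrapper */
      static PetscErrorCode WrapMult(Mat A)
      {
        PetscErrorCode ierr;

        PetscFunctionBegin;
        ierr = MatGetOperation(A,MATOP_MULT,(void(**)(void))&nativemult);CHKERRQ(ierr);
        ierr = MatSetOperation(A,MATOP_MULT,(void(*)(void))LoggedMult);CHKERRQ(ierr);
        PetscFunctionReturn(0);
      }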
10840
10841/*@
10842 MatHasOperation - Determines whether the given matrix supports the particular
10843 operation.
10844
10845 Not Collective
10846
10847 Input Parameters:
10848+ mat - the matrix
10849- op - the operation, for example, MATOP_GET_DIAGONAL
10850
10851 Output Parameter:
10852. has - either PETSC_TRUE or PETSC_FALSE
10853
10854 Level: advanced
10855
10856 Notes:
10857 See the file include/petscmat.h for a complete list of matrix
10858 operations, which all have the form MATOP_<OPERATION>, where
10859 <OPERATION> is the name (in all capital letters) of the
10860   user interface routine (e.g., MatNorm() -> MATOP_NORM).
10861
10862.seealso: MatCreateShell()
10863@*/
10864PetscErrorCode MatHasOperation(Mat mat,MatOperation op,PetscBool *has)
10865{
10866 PetscErrorCode ierr;
10867
10868  PetscFunctionBegin;
10869  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10870  PetscValidType(mat,1);
10871  PetscValidPointer(has,3);
10872 if (mat->ops->hasoperation) {
10873    ierr = (*mat->ops->hasoperation)(mat,op,has);CHKERRQ(ierr);
10874 } else {
10875 if (((void**)mat->ops)[op]) *has = PETSC_TRUE;
10876 else {
10877 *has = PETSC_FALSE;
10878 if (op == MATOP_CREATE_SUBMATRIX) {
10879 PetscMPIInt size;
10880
10881        ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
10882 if (size == 1) {
10883          ierr = MatHasOperation(mat,MATOP_CREATE_SUBMATRICES,has);CHKERRQ(ierr);
10884 }
10885 }
10886 }
10887 }
10888  PetscFunctionReturn(0);
10889}
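
   A typical pattern is probing before calling an optional operation rather than letting the dispatch fail. A sketch (the helper GetDiagonalOrOnes and its fallback semantics are hypothetical):

      #include <petscmat.h>

      /* Extract the diagonal if the matrix type supports it; otherwise fall
         back to a vector of ones, e.g., for Jacobi-like scaling */
      static PetscErrorCode GetDiagonalOrOnes(Mat A,Vec d)
      {
        PetscBool      has;
        PetscErrorCode ierr;

        PetscFunctionBegin;
        ierr = MatHasOperation(A,MATOP_GET_DIAGONAL,&has);CHKERRQ(ierr);
        if (has) {
          ierr = MatGetDiagonal(A,d);CHKERRQ(ierr);
        } else {
          ierr = VecSet(d,1.0);CHKERRQ(ierr);  /* fallback when unsupported */
        }
        PetscFunctionReturn(0);
      }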
10890
10891/*@
10892   MatHasCongruentLayouts - Determines whether the row and column layouts
10893   of the matrix are congruent
10894
10895   Collective on Mat
10896
10897   Input Parameter:
10898. mat - the matrix
10899
10900 Output Parameter:
10901. cong - either PETSC_TRUE or PETSC_FALSE
10902
10903 Level: beginner
10904
10905   Notes:
10906   The result of the layout comparison is computed once and cached on the matrix for later calls.
10907.seealso: MatCreate(), MatSetSizes()
10908@*/
10909PetscErrorCode MatHasCongruentLayouts(Mat mat,PetscBool *cong)
10910{
10911 PetscErrorCode ierr;
10912
10913  PetscFunctionBegin;
10914  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10915  PetscValidType(mat,1);
10916  PetscValidPointer(cong,2);
10917 if (!mat->rmap || !mat->cmap) {
10918 *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
10919    PetscFunctionReturn(0);
10920 }
10921 if (mat->congruentlayouts == PETSC_DECIDE-1) { /* first time we compare rows and cols layouts */
10922    ierr = PetscLayoutCompare(mat->rmap,mat->cmap,cong);CHKERRQ(ierr);
10923 if (*cong) mat->congruentlayouts = 1;
10924 else mat->congruentlayouts = 0;
10925 } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
10926  PetscFunctionReturn(0);
10927}
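
   A sketch (the helper CreateSharedWorkVec is hypothetical) of why the cached answer is useful: a single work vector can stand in for both row- and column-distributed vectors only when the layouts are congruent:

      #include <petscmat.h>

      /* Create one work vector usable on either side of MatMult(), which is
         only safe when the row and column layouts agree */
      static PetscErrorCode CreateSharedWorkVec(Mat A,Vec *w)
      {
        PetscBool      cong;
        PetscErrorCode ierr;

        PetscFunctionBegin;
        ierr = MatHasCongruentLayouts(A,&cong);CHKERRQ(ierr);
        if (!cong) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Row and column layouts differ");
        ierr = MatCreateVecs(A,w,NULL);CHKERRQ(ierr); /* column layout; matches row layout too */
        PetscFunctionReturn(0);
      }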
10928
10929/*@
10930   MatFreeIntermediateDataStructures - Frees intermediate data structures created for reuse,
10931   e.g., the matrix product in MatPtAP.
10932
10933   Collective on Mat
10934
10935   Input Parameter:
10936. mat - the matrix
10937
10938 Output Parameter:
10939. mat - the matrix with intermediate data structures released
10940
10941 Level: advanced
10942
10943   Notes:
10944   This routine does nothing for matrix types that do not cache intermediate data structures.
10945.seealso: MatPtAP(), MatMatMult()
10946@*/
10947PetscErrorCode MatFreeIntermediateDataStructures(Mat mat)
10948{
10949 PetscErrorCode ierr;
10950
10951  PetscFunctionBegin;
10952  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10953  PetscValidType(mat,1);
10954 if (mat->ops->freeintermediatedatastructures) {
10955    ierr = (*mat->ops->freeintermediatedatastructures)(mat);CHKERRQ(ierr);
10956 }
10957  PetscFunctionReturn(0);
10958}
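
   A sketch (the driver PtAPReuseExample is hypothetical and the fill estimate 2.0 is arbitrary) of the intended life cycle: keep the intermediate data while MAT_REUSE_MATRIX products are still coming, then release it once the numeric values are final:

      #include <petscmat.h>

      /* Compute C = P^T*A*P once symbolically, update it numerically a few
         times, then drop the cached intermediate data */
      static PetscErrorCode PtAPReuseExample(Mat A,Mat P,PetscInt nsteps)
      {
        Mat            C;
        PetscInt       i;
        PetscErrorCode ierr;

        PetscFunctionBegin;
        ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C);CHKERRQ(ierr);
        for (i=0; i<nsteps; i++) {
          /* ... application code changes the entries of A here ... */
          ierr = MatPtAP(A,P,MAT_REUSE_MATRIX,2.0,&C);CHKERRQ(ierr);
        }
        /* No further reuse planned: release symbolic/communication scratch data */
        ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
        ierr = MatDestroy(&C);CHKERRQ(ierr);
        PetscFunctionReturn(0);
      }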