Bug Summary

File: mat/interface/matrix.c
Warning: line 9141, column 7
Value stored to 'ierr' is never read
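
The warning means `ierr` receives a return value that is never examined afterwards, so a failure at that call would be silently ignored. A minimal sketch of the pattern and the usual fix (hypothetical code, not the flagged statement at line 9141, which lies outside this excerpt):

    PetscErrorCode ierr;
    ierr = SomePetscCall(obj);             /* dead store: ierr is assigned but never read */

    /* fix: propagate the error with CHKERRQ(), which reads and checks ierr */
    ierr = SomePetscCall(obj);CHKERRQ(ierr);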

Annotated Source Code

/*
   This is where the abstract matrix operations are defined
*/

#include <petsc/private/matimpl.h>        /*I "petscmat.h" I*/
#include <petsc/private/isimpl.h>
#include <petsc/private/vecimpl.h>

/* Logging support */
PetscClassId MAT_CLASSID;
PetscClassId MAT_COLORING_CLASSID;
PetscClassId MAT_FDCOLORING_CLASSID;
PetscClassId MAT_TRANSPOSECOLORING_CLASSID;

PetscLogEvent MAT_Mult, MAT_Mults, MAT_MultConstrained, MAT_MultAdd, MAT_MultTranspose;
PetscLogEvent MAT_MultTransposeConstrained, MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
PetscLogEvent MAT_TransposeColoringCreate;
PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
PetscLogEvent MAT_Getsymtranspose, MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
PetscLogEvent MAT_Applypapt, MAT_Applypapt_numeric, MAT_Applypapt_symbolic, MAT_GetSequentialNonzeroStructure;
PetscLogEvent MAT_GetMultiProcBlock;
PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_SetValuesBatch;
PetscLogEvent MAT_ViennaCLCopyToGPU;
PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;

const char *const MatFactorTypes[] = {"NONE","LU","CHOLESKY","ILU","ICC","ILUDT","MatFactorType","MAT_FACTOR_",0};

/*@
   MatSetRandom - Sets all components of a matrix to random numbers. For sparse
   matrices that have been preallocated but not yet assembled, it randomly
   selects appropriate locations.

   Logically Collective on Mat

   Input Parameters:
+  x - the matrix
-  rctx - the random number context, formed by PetscRandomCreate(), or NULL,
          in which case one is created internally

   Output Parameter:
.  x - the matrix

   Example of Usage:
.vb
     PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
     MatSetRandom(x,rctx);
     PetscRandomDestroy(&rctx);
.ve

   Level: intermediate

.seealso: MatZeroEntries(), MatSetValues(), PetscRandomCreate(), PetscRandomDestroy()
@*/
PetscErrorCode MatSetRandom(Mat x,PetscRandom rctx)
{
  PetscErrorCode ierr;
  PetscRandom    randObj = NULL;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(x,MAT_CLASSID,1);
  if (rctx) PetscValidHeaderSpecific(rctx,PETSC_RANDOM_CLASSID,2);
  PetscValidType(x,1);

  if (!x->ops->setrandom) SETERRQ1(PetscObjectComm((PetscObject)x),PETSC_ERR_SUP,"Mat type %s",((PetscObject)x)->type_name);

  if (!rctx) {
    MPI_Comm comm;
    ierr = PetscObjectGetComm((PetscObject)x,&comm);CHKERRQ(ierr);
    ierr = PetscRandomCreate(comm,&randObj);CHKERRQ(ierr);
    ierr = PetscRandomSetFromOptions(randObj);CHKERRQ(ierr);
    rctx = randObj;
  }

  ierr = PetscLogEventBegin(MAT_SetRandom,x,rctx,0,0);CHKERRQ(ierr);
  ierr = (*x->ops->setrandom)(x,rctx);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_SetRandom,x,rctx,0,0);CHKERRQ(ierr);

  ierr = MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = PetscRandomDestroy(&randObj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero, and the row it occurred in

   Logically Collective on Mat

   Input Parameter:
.  mat - the factored matrix

   Output Parameters:
+  pivot - the pivot value computed
-  row - the row in which the zero pivot occurred. Note that this row must be
         interpreted carefully, due to row reorderings and to which processes
         share the matrix

   Level: advanced

   Notes:
   This routine does not work for factorizations done with external packages.

   This routine should only be called if MatGetFactorError() returns a value of MAT_FACTOR_NUMERIC_ZEROPIVOT.

   This can be called on non-factored matrices, for example matrices used in SOR.
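
   A minimal usage sketch (the factored matrix F and the error-handling flow
   are illustrative, not part of this routine's contract):
.vb
     MatFactorError err;
     MatGetFactorError(F,&err);
     if (err == MAT_FACTOR_NUMERIC_ZEROPIVOT) {
       PetscReal pivot;
       PetscInt  row;
       MatFactorGetErrorZeroPivot(F,&pivot,&row);
     }
.ve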

.seealso: MatZeroEntries(), MatFactor(), MatGetFactor(), MatFactorSymbolic(), MatFactorClearError(), MatFactorGetError()
@*/
PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat,PetscReal *pivot,PetscInt *row)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  *pivot = mat->factorerror_zeropivot_value;
  *row   = mat->factorerror_zeropivot_row;
  PetscFunctionReturn(0);
}

/*@
   MatFactorGetError - gets the error code from a factorization

   Logically Collective on Mat

   Input Parameter:
.  mat - the factored matrix

   Output Parameter:
.  err - the error code

   Level: advanced

   Notes:
   This can be called on non-factored matrices, for example matrices used in SOR.

.seealso: MatZeroEntries(), MatFactor(), MatGetFactor(), MatFactorSymbolic(), MatFactorClearError(), MatFactorGetErrorZeroPivot()
@*/
PetscErrorCode MatFactorGetError(Mat mat,MatFactorError *err)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  *err = mat->factorerrortype;
  PetscFunctionReturn(0);
}

/*@
   MatFactorClearError - clears the error code in a factorization

   Logically Collective on Mat

   Input Parameter:
.  mat - the factored matrix

   Level: developer

   Notes:
   This can be called on non-factored matrices, for example matrices used in SOR.

.seealso: MatZeroEntries(), MatFactor(), MatGetFactor(), MatFactorSymbolic(), MatFactorGetError(), MatFactorGetErrorZeroPivot()
@*/
PetscErrorCode MatFactorClearError(Mat mat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  mat->factorerrortype             = MAT_FACTOR_NOERROR;
  mat->factorerror_zeropivot_value = 0.0;
  mat->factorerror_zeropivot_row   = 0;
  PetscFunctionReturn(0);
}

PETSC_INTERN PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat,PetscBool cols,PetscReal tol,IS *nonzero)
{
  PetscErrorCode    ierr;
  Vec               r,l;
  const PetscScalar *al;
  PetscInt          i,nz,gnz,N,n;

  PetscFunctionBegin;
  ierr = MatCreateVecs(mat,&r,&l);CHKERRQ(ierr);
  if (!cols) { /* nonzero rows */
    ierr = MatGetSize(mat,&N,NULL);CHKERRQ(ierr);
    ierr = MatGetLocalSize(mat,&n,NULL);CHKERRQ(ierr);
    ierr = VecSet(l,0.0);CHKERRQ(ierr);
    ierr = VecSetRandom(r,NULL);CHKERRQ(ierr);
    ierr = MatMult(mat,r,l);CHKERRQ(ierr);
    ierr = VecGetArrayRead(l,&al);CHKERRQ(ierr);
  } else { /* nonzero columns */
    ierr = MatGetSize(mat,NULL,&N);CHKERRQ(ierr);
    ierr = MatGetLocalSize(mat,NULL,&n);CHKERRQ(ierr);
    ierr = VecSet(r,0.0);CHKERRQ(ierr);
    ierr = VecSetRandom(l,NULL);CHKERRQ(ierr);
    ierr = MatMultTranspose(mat,l,r);CHKERRQ(ierr);
    ierr = VecGetArrayRead(r,&al);CHKERRQ(ierr);
  }
  if (tol <= 0.0) { for (i=0,nz=0;i<n;i++) if (al[i] != 0.0) nz++; }
  else { for (i=0,nz=0;i<n;i++) if (PetscAbsScalar(al[i]) > tol) nz++; }
  ierr = MPIU_Allreduce(&nz,&gnz,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
  if (gnz != N) {
    PetscInt *nzr;
    ierr = PetscMalloc1(nz,&nzr);CHKERRQ(ierr);
    if (nz) {
      if (tol < 0) { for (i=0,nz=0;i<n;i++) if (al[i] != 0.0) nzr[nz++] = i; }
      else { for (i=0,nz=0;i<n;i++) if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i; }
    }
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject)mat),nz,nzr,PETSC_OWN_POINTER,nonzero);CHKERRQ(ierr);
  } else *nonzero = NULL;
  if (!cols) { /* nonzero rows */
    ierr = VecRestoreArrayRead(l,&al);CHKERRQ(ierr);
  } else {
    ierr = VecRestoreArrayRead(r,&al);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&l);CHKERRQ(ierr);
  ierr = VecDestroy(&r);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix

   Input Parameter:
.  mat - the matrix

   Output Parameter:
.  keptrows - the rows that are not completely zero

   Notes:
   keptrows is set to NULL if all rows are nonzero.
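
   A minimal usage sketch (error checking omitted; keptrows must be tested
   against NULL before use, per the note above):
.vb
     IS keptrows;
     MatFindNonzeroRows(mat,&keptrows);
     if (keptrows) {
       /* ... use the index set ... */
       ISDestroy(&keptrows);
     }
.ve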

   Level: intermediate

@*/
PetscErrorCode MatFindNonzeroRows(Mat mat,IS *keptrows)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidPointer(keptrows,2);
  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!mat->ops->findnonzerorows) {
    ierr = MatFindNonzeroRowsOrCols_Basic(mat,PETSC_FALSE,0.0,keptrows);CHKERRQ(ierr);
  } else {
    ierr = (*mat->ops->findnonzerorows)(mat,keptrows);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
   MatFindZeroRows - Locate all rows that are completely zero in the matrix

   Input Parameter:
.  mat - the matrix

   Output Parameter:
.  zerorows - the rows that are completely zero

   Notes:
   zerorows is set to NULL if no rows are zero.

   Level: intermediate

@*/
PetscErrorCode MatFindZeroRows(Mat mat,IS *zerorows)
{
  PetscErrorCode ierr;
  IS             keptrows;
  PetscInt       m, n;

  PetscFunctionBegin;  /* was missing: PetscFunctionReturn() below requires a matching stack push */
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);

  ierr = MatFindNonzeroRows(mat, &keptrows);CHKERRQ(ierr);
  /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
     In keeping with this convention, we set zerorows to NULL if there are no zero
     rows. */
  if (keptrows == NULL) {
    *zerorows = NULL;
  } else {
    ierr = MatGetOwnershipRange(mat,&m,&n);CHKERRQ(ierr);
    ierr = ISComplement(keptrows,m,n,zerorows);CHKERRQ(ierr);
    ierr = ISDestroy(&keptrows);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
   MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling

   Not Collective

   Input Parameter:
.  A - the matrix

   Output Parameter:
.  a - the diagonal part (which is a SEQUENTIAL matrix)

   Notes:
   See the manual page for MatCreateAIJ() for more information on the "diagonal part" of the matrix.
   Use caution, as the reference count on the returned matrix is not incremented and it is used as
   part of the containing MPI Mat's normal operation.
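
   A minimal usage sketch (since the reference count is not incremented, the
   returned matrix must not be destroyed by the caller):
.vb
     Mat Ad;
     MatGetDiagonalBlock(A,&Ad);
     /* ... inspect Ad; do not call MatDestroy(&Ad) ... */
.ve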

   Level: advanced

@*/
PetscErrorCode MatGetDiagonalBlock(Mat A,Mat *a)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  PetscValidPointer(a,3);
  if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!A->ops->getdiagonalblock) {
    PetscMPIInt size;
    ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
    if (size == 1) {
      *a = A;
      PetscFunctionReturn(0);
    } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Not coded for this matrix type");
  }
  ierr = (*A->ops->getdiagonalblock)(A,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   MatGetTrace - Gets the trace of a matrix, that is, the sum of the diagonal entries.

   Collective on Mat

   Input Parameter:
.  mat - the matrix

   Output Parameter:
.  trace - the sum of the diagonal entries
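
   A minimal usage sketch (variable names illustrative):
.vb
     PetscScalar trace;
     MatGetTrace(mat,&trace);
.ve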

   Level: advanced

@*/
PetscErrorCode MatGetTrace(Mat mat,PetscScalar *trace)
{
  PetscErrorCode ierr;
  Vec            diag;

  PetscFunctionBegin;
  ierr = MatCreateVecs(mat,&diag,NULL);CHKERRQ(ierr);
  ierr = MatGetDiagonal(mat,diag);CHKERRQ(ierr);
  ierr = VecSum(diag,trace);CHKERRQ(ierr);
  ierr = VecDestroy(&diag);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   MatRealPart - Zeros out the imaginary part of the matrix

   Logically Collective on Mat

   Input Parameter:
.  mat - the matrix

   Level: advanced

.seealso: MatImaginaryPart()
@*/
PetscErrorCode MatRealPart(Mat mat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!mat->ops->realpart) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);
  ierr = (*mat->ops->realpart)(mat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix

   Collective on Mat

   Input Parameter:
.  mat - the matrix

   Output Parameters:
+  nghosts - number of ghosts (for BAIJ matrices there is one ghost for each block)
-  ghosts - the global indices of the ghost points

   Notes:
   nghosts and ghosts are suitable to pass into VecCreateGhost().
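
   A minimal sketch of passing the result to VecCreateGhost() (nlocal and the
   vector variable are illustrative):
.vb
     PetscInt       nghosts;
     const PetscInt *ghosts;
     Vec            v;
     MatGetGhosts(mat,&nghosts,&ghosts);
     VecCreateGhost(PetscObjectComm((PetscObject)mat),nlocal,PETSC_DECIDE,nghosts,ghosts,&v);
.ve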

   Level: advanced

@*/
PetscErrorCode MatGetGhosts(Mat mat,PetscInt *nghosts,const PetscInt *ghosts[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!mat->ops->getghosts) {
    if (nghosts) *nghosts = 0;
    if (ghosts) *ghosts = 0;
  } else {
    ierr = (*mat->ops->getghosts)(mat,nghosts,ghosts);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
   MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part

   Logically Collective on Mat

   Input Parameter:
.  mat - the matrix

   Level: advanced

.seealso: MatRealPart()
@*/
PetscErrorCode MatImaginaryPart(Mat mat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!mat->ops->imaginarypart) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);
  ierr = (*mat->ops->imaginarypart)(mat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   MatMissingDiagonal - Determine if the sparse matrix is missing a diagonal entry (or block entry for BAIJ matrices)

   Not Collective

   Input Parameter:
.  mat - the matrix

   Output Parameters:
+  missing - is any diagonal entry missing
-  dd - first diagonal entry that is missing on this process (optional)
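
   A minimal usage sketch (variable names illustrative):
.vb
     PetscBool missing;
     PetscInt  dd;
     MatMissingDiagonal(mat,&missing,&dd);
     if (missing) { /* e.g. insert explicit zeros on the diagonal before factoring */ }
.ve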

   Level: advanced

.seealso: MatRealPart()
@*/
PetscErrorCode MatMissingDiagonal(Mat mat,PetscBool *missing,PetscInt *dd)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!mat->ops->missingdiagonal) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  ierr = (*mat->ops->missingdiagonal)(mat,missing,dd);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

489/*@C
490 MatGetRow - Gets a row of a matrix. You MUST call MatRestoreRow()
491 for each row that you get to ensure that your application does
492 not bleed memory.
493
494 Not Collective
495
496 Input Parameters:
497+ mat - the matrix
498- row - the row to get
499
500 Output Parameters:
501+ ncols - if not NULL, the number of nonzeros in the row
502. cols - if not NULL, the column numbers
503- vals - if not NULL, the values
504
505 Notes:
506 This routine is provided for people who need to have direct access
507 to the structure of a matrix. We hope that we provide enough
508 high-level matrix routines that few users will need it.
509
510 MatGetRow() always returns 0-based column indices, regardless of
511 whether the internal representation is 0-based (default) or 1-based.
512
513 For better efficiency, set cols and/or vals to NULL if you do
514 not wish to extract these quantities.
515
516 The user can only examine the values extracted with MatGetRow();
517 the values cannot be altered. To change the matrix entries, one
518 must use MatSetValues().
519
520 You can only have one call to MatGetRow() outstanding for a particular
521 matrix at a time, per processor. MatGetRow() can only obtain rows
522 associated with the given processor; it cannot get rows owned by
523 other processors. For those we suggest using MatCreateSubMatrices(), then
524 MatGetRow() on the submatrix. The row index passed to MatGetRow()
525 is in the global numbering of rows.
526
527 Fortran Notes:
528 The calling sequence from Fortran is
529.vb
530 MatGetRow(matrix,row,ncols,cols,values,ierr)
531 Mat matrix (input)
532 integer row (input)
533 integer ncols (output)
534 integer cols(maxcols) (output)
535 double precision (or double complex) values(maxcols) (output)
536.ve
537 where maxcols >= maximum nonzeros in any row of the matrix.
538
539
540 Caution:
541 Do not try to change the contents of the output arrays (cols and vals).
542 In some cases, this may corrupt the matrix.
543
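 Example of Usage:
 A minimal editorial sketch (not part of the original manual page) that scans the locally owned rows; it assumes the usual ierr/CHKERRQ() error handling used throughout PETSc:
.vb
 PetscInt rstart,rend,row,ncols;
 const PetscInt *cols;
 const PetscScalar *vals;

 ierr = MatGetOwnershipRange(mat,&rstart,&rend);CHKERRQ(ierr);
 for (row=rstart; row<rend; row++) {
   ierr = MatGetRow(mat,row,&ncols,&cols,&vals);CHKERRQ(ierr);
   /* read-only access to the ncols entries cols[] and vals[] of this row */
   ierr = MatRestoreRow(mat,row,&ncols,&cols,&vals);CHKERRQ(ierr);
 }
.ve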
544 Level: advanced
545
546.seealso: MatRestoreRow(), MatSetValues(), MatGetValues(), MatCreateSubMatrices(), MatGetDiagonal()
547@*/
548PetscErrorCode MatGetRow(Mat mat,PetscInt row,PetscInt *ncols,const PetscInt *cols[],const PetscScalar *vals[])
549{
550 PetscErrorCode ierr;
551 PetscInt incols;
552
553 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 553; petscstack->petscroutine
[petscstack->currentsize] = PETSC_TRUE; petscstack->currentsize
++; } if (petscstack) { petscstack->hotdepth += (PETSC_FALSE
|| petscstack->hotdepth); } ; } while (0); ; } while (0)
;
554 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),554,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(mat
,PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),554,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),554,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),554,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
555 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),555,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
556 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(((MPI_Comm)0x44000001),556,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Not for unassembled matrix")
;
557 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(((MPI_Comm)0x44000001),557,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Not for factored matrix")
;
558 if (!mat->ops->getrow) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(((MPI_Comm)0x44000001),558,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,56,PETSC_ERROR_INITIAL,"Mat type %s",((PetscObject)mat)->
type_name)
;
559 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),559,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
560 ierr = PetscLogEventBegin(MAT_GetRow,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_GetRow].active) ? (*PetscLogPLB)((MAT_GetRow),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),560,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
561 ierr = (*mat->ops->getrow)(mat,row,&incols,(PetscInt**)cols,(PetscScalar**)vals);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),561,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
562 if (ncols) *ncols = incols;
563 ierr = PetscLogEventEnd(MAT_GetRow,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_GetRow].active) ? (*PetscLogPLE)((MAT_GetRow),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),563,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
564 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
565}
566
567/*@
568 MatConjugate - Replaces the matrix values with their complex conjugates.
569
570 Logically Collective on Mat
571
572 Input Parameter:
573. mat - the matrix
574
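 Example of Usage:
 A minimal editorial sketch (not from the original page): combined with an in-place MatTranspose(), MatConjugate() yields the Hermitian transpose; for real scalars it is a no-op, as the #else branch in the function body below shows:
.vb
 ierr = MatTranspose(A,MAT_INPLACE_MATRIX,&A);CHKERRQ(ierr);
 ierr = MatConjugate(A);CHKERRQ(ierr);   /* A now holds its own conjugate transpose */
.ve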
575 Level: advanced
576
577.seealso: VecConjugate()
578@*/
579PetscErrorCode MatConjugate(Mat mat)
580{
581#if defined(PETSC_USE_COMPLEX)
582 PetscErrorCode ierr;
583
584 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 584; petscstack->petscroutine
[petscstack->currentsize] = PETSC_TRUE; petscstack->currentsize
++; } if (petscstack) { petscstack->hotdepth += (PETSC_FALSE
|| petscstack->hotdepth); } ; } while (0); ; } while (0)
;
585 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),585,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(mat
,PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),585,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),585,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),585,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
586 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),586,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
587 if (!mat->ops->conjugate) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Not provided for this matrix format, send email to [email protected]")return PetscError(PetscObjectComm((PetscObject)mat),587,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Not provided for this matrix format, send email to [email protected]"
)
;
588 ierr = (*mat->ops->conjugate)(mat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),588,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
589#else
590 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 590; petscstack->petscroutine
[petscstack->currentsize] = PETSC_TRUE; petscstack->currentsize
++; } if (petscstack) { petscstack->hotdepth += (PETSC_FALSE
|| petscstack->hotdepth); } ; } while (0); ; } while (0)
;
591#endif
592 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
593}
594
595/*@C
596 MatRestoreRow - Frees any temporary space allocated by MatGetRow().
597
598 Not Collective
599
600 Input Parameters:
601+ mat - the matrix
602. row - the row to get
603. ncols, cols - the number of nonzeros and their columns
604- vals - if not NULL, the values
605
606 Notes:
607 This routine should be called after you have finished examining the entries.
608
609 This routine zeros out ncols, cols, and vals. This is to prevent accidental
610 use of the arrays after they have been restored. If you pass NULL, it will
611 not zero the pointers. Any use of cols or vals after MatRestoreRow() is invalid.
612
613 Fortran Notes:
614 The calling sequence from Fortran is
615.vb
616 MatRestoreRow(matrix,row,ncols,cols,values,ierr)
617 Mat matrix (input)
618 integer row (input)
619 integer ncols (output)
620 integer cols(maxcols) (output)
621 double precision (or double complex) values(maxcols) (output)
622.ve
623 where maxcols >= maximum nonzeros in any row of the matrix.
624
625 In Fortran MatRestoreRow() MUST be called after MatGetRow()
626 before another call to MatGetRow() can be made.
627
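 Example of Usage:
 A minimal editorial sketch of the zeroing behavior described above:
.vb
 ierr = MatGetRow(mat,row,&ncols,&cols,&vals);CHKERRQ(ierr);
 ierr = MatRestoreRow(mat,row,&ncols,&cols,&vals);CHKERRQ(ierr);
 /* now ncols == 0, cols == NULL, vals == NULL; do not dereference them */
.ve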
628 Level: advanced
629
630.seealso: MatGetRow()
631@*/
632PetscErrorCode MatRestoreRow(Mat mat,PetscInt row,PetscInt *ncols,const PetscInt *cols[],const PetscScalar *vals[])
633{
634 PetscErrorCode ierr;
635
636 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 636; petscstack->petscroutine
[petscstack->currentsize] = PETSC_TRUE; petscstack->currentsize
++; } if (petscstack) { petscstack->hotdepth += (PETSC_FALSE
|| petscstack->hotdepth); } ; } while (0); ; } while (0)
;
637 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),637,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(mat
,PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),637,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),637,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),637,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
638 if (ncols) PetscValidIntPointer(ncols,3)do { if (!ncols) return PetscError(((MPI_Comm)0x44000001),638
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(ncols,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),638,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscInt: Parameter # %d"
,3); } while (0)
;
639 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(((MPI_Comm)0x44000001),639,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Not for unassembled matrix")
;
640 if (!mat->ops->restorerow) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
641 ierr = (*mat->ops->restorerow)(mat,row,ncols,(PetscInt **)cols,(PetscScalar **)vals);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),641,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
642 if (ncols) *ncols = 0;
643 if (cols) *cols = NULL((void*)0);
644 if (vals) *vals = NULL((void*)0);
645 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
646}
647
648/*@
649 MatGetRowUpperTriangular - Sets a flag to enable calls to MatGetRow() for matrices in MATSBAIJ format.
650 You should call MatRestoreRowUpperTriangular() after calling MatGetRow()/MatRestoreRow() to disable the flag.
651
652 Not Collective
653
654 Input Parameter:
655. mat - the matrix
656
657 Notes:
658 The flag ensures that users are aware that MatGetRow() provides only the upper triangular part of the row for matrices in MATSBAIJ format.
659
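 Example of Usage:
 A minimal editorial sketch for a MATSBAIJ matrix; the bracketing calls enable and later disable the flag around the row access:
.vb
 ierr = MatGetRowUpperTriangular(mat);CHKERRQ(ierr);
 ierr = MatGetRow(mat,row,&ncols,&cols,&vals);CHKERRQ(ierr);
 /* cols[]/vals[] hold only the upper triangular part of the row */
 ierr = MatRestoreRow(mat,row,&ncols,&cols,&vals);CHKERRQ(ierr);
 ierr = MatRestoreRowUpperTriangular(mat);CHKERRQ(ierr);
.ve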
660 Level: advanced
661
662.seealso: MatRestoreRowUpperTriangular()
663@*/
664PetscErrorCode MatGetRowUpperTriangular(Mat mat)
665{
666 PetscErrorCode ierr;
667
668 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 668; petscstack->petscroutine
[petscstack->currentsize] = PETSC_TRUE; petscstack->currentsize
++; } if (petscstack) { petscstack->hotdepth += (PETSC_FALSE
|| petscstack->hotdepth); } ; } while (0); ; } while (0)
;
669 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),669,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(mat
,PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),669,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),669,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),669,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
670 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),670,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
671 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(((MPI_Comm)0x44000001),671,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Not for unassembled matrix")
;
672 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(((MPI_Comm)0x44000001),672,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Not for factored matrix")
;
673 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),673,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
674 if (!mat->ops->getrowuppertriangular) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
675 ierr = (*mat->ops->getrowuppertriangular)(mat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),675,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
676 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
677}
678
679/*@
680 MatRestoreRowUpperTriangular - Disables calls to MatGetRow() for matrices in MATSBAIJ format.
681
682 Not Collective
683
684 Input Parameter:
685. mat - the matrix
686
687 Notes:
688 This routine should be called after you have finished calling MatGetRow()/MatRestoreRow(); see the example under MatGetRowUpperTriangular() above.
689
690
691 Level: advanced
692
693.seealso: MatGetRowUpperTriangular()
694@*/
695PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
696{
697 PetscErrorCode ierr;
698
699 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 699; petscstack->petscroutine
[petscstack->currentsize] = PETSC_TRUE; petscstack->currentsize
++; } if (petscstack) { petscstack->hotdepth += (PETSC_FALSE
|| petscstack->hotdepth); } ; } while (0); ; } while (0)
;
700 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),700,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(mat
,PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),700,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),700,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),700,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
701 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),701,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
702 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(((MPI_Comm)0x44000001),702,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Not for unassembled matrix")
;
703 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(((MPI_Comm)0x44000001),703,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Not for factored matrix")
;
704 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),704,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
705 if (!mat->ops->restorerowuppertriangular) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
706 ierr = (*mat->ops->restorerowuppertriangular)(mat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),706,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
707 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
708}
709
710/*@C
711 MatSetOptionsPrefix - Sets the prefix used for searching for all
712 Mat options in the database.
713
714 Logically Collective on Mat
715
716 Input Parameters:
717+ A - the Mat context
718- prefix - the prefix to prepend to all option names
719
720 Notes:
721 A hyphen (-) must NOT be given at the beginning of the prefix name.
722 The first character of all runtime options is AUTOMATICALLY the hyphen.
723
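 Example of Usage:
 A minimal editorial sketch: with the prefix "a_" (an illustrative choice), runtime options for this matrix are looked up under -a_..., e.g. -a_mat_type instead of -mat_type:
.vb
 ierr = MatSetOptionsPrefix(A,"a_");CHKERRQ(ierr);
 ierr = MatSetFromOptions(A);CHKERRQ(ierr);   /* now honors e.g. -a_mat_type aij */
.ve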
724 Level: advanced
725
726.seealso: MatSetFromOptions()
727@*/
728PetscErrorCode MatSetOptionsPrefix(Mat A,const char prefix[])
729{
730 PetscErrorCode ierr;
731
732 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 732; petscstack->petscroutine
[petscstack->currentsize] = PETSC_TRUE; petscstack->currentsize
++; } if (petscstack) { petscstack->hotdepth += (PETSC_FALSE
|| petscstack->hotdepth); } ; } while (0); ; } while (0)
;
733 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),733,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),733,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),733,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),733,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
734 ierr = PetscObjectSetOptionsPrefix((PetscObject)A,prefix);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),734,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
735 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
736}
737
738/*@C
739 MatAppendOptionsPrefix - Appends to the prefix used for searching for all
740 Mat options in the database.
741
742 Logically Collective on Mat
743
744 Input Parameters:
745+ A - the Mat context
746- prefix - the prefix to append to the current prefix
747
748 Notes:
749 A hyphen (-) must NOT be given at the beginning of the prefix name.
750 The first character of all runtime options is AUTOMATICALLY the hyphen.
751
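 Example of Usage:
 A minimal editorial sketch: appending "sub_" to a previously set prefix "a_" (both illustrative) gives the combined prefix "a_sub_", which MatGetOptionsPrefix() reports:
.vb
 const char *prefix;

 ierr = MatSetOptionsPrefix(A,"a_");CHKERRQ(ierr);
 ierr = MatAppendOptionsPrefix(A,"sub_");CHKERRQ(ierr);
 ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);   /* prefix is "a_sub_" */
.ve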
752 Level: advanced
753
754.seealso: MatGetOptionsPrefix()
755@*/
756PetscErrorCode MatAppendOptionsPrefix(Mat A,const char prefix[])
757{
758 PetscErrorCode ierr;
759
760 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 760; petscstack->petscroutine
[petscstack->currentsize] = PETSC_TRUE; petscstack->currentsize
++; } if (petscstack) { petscstack->hotdepth += (PETSC_FALSE
|| petscstack->hotdepth); } ; } while (0); ; } while (0)
;
761 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),761,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),761,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),761,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),761,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
762 ierr = PetscObjectAppendOptionsPrefix((PetscObject)A,prefix);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),762,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
763 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
764}
765
766/*@C
767 MatGetOptionsPrefix - Gets the prefix used for searching for all
768 Mat options in the database.
769
770 Not Collective
771
772 Input Parameter:
773. A - the Mat context
774
775 Output Parameter:
776. prefix - pointer to the prefix string used
777
778 Notes:
779 On the Fortran side, the user should pass in a string 'prefix' of
780 sufficient length to hold the prefix.
781
782 Level: advanced
783
784.seealso: MatAppendOptionsPrefix()
785@*/
786PetscErrorCode MatGetOptionsPrefix(Mat A,const char *prefix[])
787{
788 PetscErrorCode ierr;
789
790 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 790; petscstack->petscroutine
[petscstack->currentsize] = PETSC_TRUE; petscstack->currentsize
++; } if (petscstack) { petscstack->hotdepth += (PETSC_FALSE
|| petscstack->hotdepth); } ; } while (0); ; } while (0)
;
791 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),791,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),791,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),791,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),791,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
792 ierr = PetscObjectGetOptionsPrefix((PetscObject)A,prefix);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),792,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
793 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
794}
795
796/*@
797 MatResetPreallocation - Resets the matrix to use the original nonzero pattern provided by the user.
798
799 Collective on Mat
800
801 Input Parameter:
802. A - the Mat context
803
804 Notes:
805 The allocated memory is shrunk after calling MatAssemblyBegin()/MatAssemblyEnd() with MAT_FINAL_ASSEMBLY. Users can reset the preallocation to regain access to the originally allocated memory.
806 Currently only the MATSEQAIJ and MATMPIAIJ types are supported.
807
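 Example of Usage:
 A minimal editorial sketch: a first assembly with MAT_FINAL_ASSEMBLY shrinks the unused allocated space; resetting restores the original preallocation so the same nonzero pattern can be refilled without extra mallocs:
.vb
 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
 ierr = MatResetPreallocation(A);CHKERRQ(ierr);
 /* insert values again into the originally preallocated pattern */
.ve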
808 Level: beginner
809
810.seealso: MatSeqAIJSetPreallocation(), MatMPIAIJSetPreallocation(), MatXAIJSetPreallocation()
811@*/
812PetscErrorCode MatResetPreallocation(Mat A)
813{
814 PetscErrorCode ierr;
815
816 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 816; petscstack->petscroutine
[petscstack->currentsize] = PETSC_TRUE; petscstack->currentsize
++; } if (petscstack) { petscstack->hotdepth += (PETSC_FALSE
|| petscstack->hotdepth); } ; } while (0); ; } while (0)
;
817 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),817,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),817,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),817,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),817,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
818 PetscValidType(A,1)do { if (!((PetscObject)A)->type_name) return PetscError((
(MPI_Comm)0x44000001),818,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)A)->class_name,1); } while (0)
;
819 ierr = PetscUseMethod(A,"MatResetPreallocation_C",(Mat),(A))0; do { PetscErrorCode (*_7_f)(Mat), _7_ierr; _7_ierr = PetscObjectQueryFunction_Private
(((PetscObject)(A)),("MatResetPreallocation_C"),(PetscVoidFunction
*)(&_7_f));do {if (__builtin_expect(!!(_7_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),819,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_f) {_7_ierr
= (*_7_f)(A);do {if (__builtin_expect(!!(_7_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),819,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0);} else return PetscError
(PetscObjectComm((PetscObject)(A)),819,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,56,PETSC_ERROR_INITIAL,"Cannot locate function %s in object"
,"MatResetPreallocation_C"); } while(0)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),819,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
820 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
821}
822
823
824/*@
825 MatSetUp - Sets up the internal matrix data structures for later use.
826
827 Collective on Mat
828
829 Input Parameter:
830. A - the Mat context
831
832 Notes:
833 If the user has not set preallocation for this matrix then a default preallocation that is likely to be inefficient is used.
834
835 If a suitable preallocation routine is used, this function does not need to be called.
836
837 See the Performance chapter of the PETSc users manual for how to preallocate matrices.
838
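 Example of Usage:
 A minimal editorial sketch of the typical creation sequence when no explicit preallocation routine is called (m and n are the global sizes):
.vb
 ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
 ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m,n);CHKERRQ(ierr);
 ierr = MatSetFromOptions(A);CHKERRQ(ierr);
 ierr = MatSetUp(A);CHKERRQ(ierr);   /* default, possibly inefficient, preallocation */
 /* ... MatSetValues(), MatAssemblyBegin()/MatAssemblyEnd() ... */
.ve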
839 Level: beginner
840
841.seealso: MatCreate(), MatDestroy()
842@*/
843PetscErrorCode MatSetUp(Mat A)
844{
845 PetscMPIInt size;
846 PetscErrorCode ierr;
847
848 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 848; petscstack->petscroutine
[petscstack->currentsize] = PETSC_TRUE; petscstack->currentsize
++; } if (petscstack) { petscstack->hotdepth += (PETSC_FALSE
|| petscstack->hotdepth); } ; } while (0); ; } while (0)
;
849 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),849,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),849,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),849,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),849,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
850 if (!((PetscObject)A)->type_name) {
851 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A), &size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),851,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
852 if (size == 1) {
853 ierr = MatSetType(A, MATSEQAIJ"seqaij");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),853,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
854 } else {
855 ierr = MatSetType(A, MATMPIAIJ"mpiaij");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),855,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
856 }
857 }
858 if (!A->preallocated && A->ops->setup) {
859 ierr = PetscInfo(A,"Warning not preallocating matrix storage\n")PetscInfo_Private(__func__,A,"Warning not preallocating matrix storage\n"
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),859,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
860 ierr = (*A->ops->setup)(A);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),860,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
861 }
862 ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),862,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
863 ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),863,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
864 A->preallocated = PETSC_TRUE;
865 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
866}
867
868#if defined(PETSC_HAVE_SAWS)
869#include <petscviewersaws.h>
870#endif
871/*@C
872 MatView - Visualizes a matrix object.
873
874 Collective on Mat
875
876 Input Parameters:
877+ mat - the matrix
878- viewer - visualization context
879
880 Notes:
881 The available visualization contexts include
882+ PETSC_VIEWER_STDOUT_SELF - for sequential matrices
883. PETSC_VIEWER_STDOUT_WORLD - for parallel matrices created on PETSC_COMM_WORLD
884. PETSC_VIEWER_STDOUT_(comm) - for matrices created on MPI communicator comm
885- PETSC_VIEWER_DRAW_WORLD - graphical display of nonzero structure
886
887 The user can open alternative visualization contexts with
888+ PetscViewerASCIIOpen() - Outputs matrix to a specified file
889. PetscViewerBinaryOpen() - Outputs matrix in binary to a
890 specified file; corresponding input uses MatLoad()
891. PetscViewerDrawOpen() - Outputs nonzero matrix structure to
892 an X window display
893- PetscViewerSocketOpen() - Outputs matrix to Socket viewer.
894 Currently only the sequential dense and AIJ
895 matrix types support the Socket viewer.
896
897 The user can call PetscViewerPushFormat() to specify the output
898 format of ASCII printed objects (when using PETSC_VIEWER_STDOUT_SELF,
899 PETSC_VIEWER_STDOUT_WORLD and PetscViewerASCIIOpen). Available formats include
900+ PETSC_VIEWER_DEFAULT - default, prints matrix contents
901. PETSC_VIEWER_ASCII_MATLAB - prints matrix contents in Matlab format
902. PETSC_VIEWER_ASCII_DENSE - prints entire matrix including zeros
903. PETSC_VIEWER_ASCII_COMMON - prints matrix contents, using a sparse
904 format common among all matrix types
905. PETSC_VIEWER_ASCII_IMPL - prints matrix contents, using an implementation-specific
906 format (which is in many cases the same as the default)
907. PETSC_VIEWER_ASCII_INFO - prints basic information about the matrix
908 size and structure (not the matrix entries)
909- PETSC_VIEWER_ASCII_INFO_DETAIL - prints more detailed information about
910 the matrix structure
911
912 Options Database Keys:
913+ -mat_view ::ascii_info - Prints info on matrix at conclusion of MatAssemblyEnd()
914. -mat_view ::ascii_info_detail - Prints more detailed info
915. -mat_view - Prints matrix in ASCII format
916. -mat_view ::ascii_matlab - Prints matrix in Matlab format
917. -mat_view draw - Draws the nonzero structure of the matrix, using MatView() and PetscDrawOpenX().
918. -display <name> - Sets display name (default is host)
919. -draw_pause <sec> - Sets number of seconds to pause after display
920. -mat_view socket - Sends matrix to socket, can be accessed from Matlab (see Users-Manual: ch_matlab for details)
921. -viewer_socket_machine <machine> - Sets the machine the socket viewer connects to
922. -viewer_socket_port <port> - Sets the port used by the socket viewer
923. -mat_view binary - Saves matrix to file in binary format
924- -viewer_binary_filename <name> - Sets the name of the file used by the binary viewer
925 Level: beginner
926
927 Notes:
928 The ASCII viewers are only recommended for small matrices on at most a moderate number of processes;
929 for larger matrices the program will seemingly hang and take hours, so one should use the binary format instead.
930
931 See the manual page for MatLoad() for the exact format of the binary file when the binary
932 viewer is used.
933
934 See share/petsc/matlab/PetscBinaryRead.m for a Matlab code that can read in the binary file when the binary
935 viewer is used.
936
937 One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
938 and then use the following mouse functions.
939+ left mouse: zoom in
940. middle mouse: zoom out
941- right mouse: continue with the simulation
942
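 Example of Usage:
 A minimal editorial sketch that prints basic information about a parallel matrix rather than its entries:
.vb
 ierr = PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO);CHKERRQ(ierr);
 ierr = MatView(mat,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
 ierr = PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
.ve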
943.seealso: PetscViewerPushFormat(), PetscViewerASCIIOpen(), PetscViewerDrawOpen(),
944 PetscViewerSocketOpen(), PetscViewerBinaryOpen(), MatLoad()
945@*/
946PetscErrorCode MatView(Mat mat,PetscViewer viewer)
947{
948 PetscErrorCode ierr;
949 PetscInt rows,cols,rbs,cbs;
950 PetscBool iascii,ibinary,isstring;
951 PetscViewerFormat format;
952 PetscMPIInt size;
953#if defined(PETSC_HAVE_SAWS)
954 PetscBool issaws;
955#endif
956
957 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 957; petscstack->petscroutine
[petscstack->currentsize] = PETSC_TRUE; petscstack->currentsize
++; } if (petscstack) { petscstack->hotdepth += (PETSC_FALSE
|| petscstack->hotdepth); } ; } while (0); ; } while (0)
;
958 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),958,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(mat
,PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),958,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),958,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),958,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
959 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),959,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
960 if (!viewer) {
961 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat),&viewer);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),961,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
962 }
963 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2)do { if (!viewer) return PetscError(((MPI_Comm)0x44000001),963
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",2); if (
!PetscCheckPointer(viewer,PETSC_OBJECT)) return PetscError(((
MPI_Comm)0x44000001),963,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(viewer))->classid != PETSC_VIEWER_CLASSID
) { if (((PetscObject)(viewer))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),963,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),963,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
964 PetscCheckSameComm(mat,1,viewer,2)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)viewer),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0
)) return PetscError(((MPI_Comm)0x44000001),964,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),964,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,_7_flag); } while (0)
;
965 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),965,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
966 ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),966,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
967 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),967,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
968 if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
969 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY"binary",&ibinary);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),969,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
970 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING"string",&isstring);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),970,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
971 if (ibinary) {
972 PetscBool mpiio;
973 ierr = PetscViewerBinaryGetUseMPIIO(viewer,&mpiio);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),973,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
974 if (mpiio) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"PETSc matrix viewers do not support using MPI-IO, turn off that flag")return PetscError(PetscObjectComm((PetscObject)viewer),974,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"PETSc matrix viewers do not support using MPI-IO, turn off that flag"
)
;
975 }
976
977 ierr = PetscLogEventBegin(MAT_View,mat,viewer,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_View].active) ? (*PetscLogPLB)((MAT_View),0,(PetscObject
)(mat),(PetscObject)(viewer),(PetscObject)(0),(PetscObject)(0
)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),977,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
978 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII"ascii",&iascii);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),978,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
979 if ((!iascii || (format != PETSC_VIEWER_ASCII_INFO && format != PETSC_VIEWER_ASCII_INFO_DETAIL)) && mat->factortype) {
980 SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"No viewers for factored matrix except ASCII info or info_detailed")return PetscError(PetscObjectComm((PetscObject)mat),980,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"No viewers for factored matrix except ASCII info or info_detailed"
)
;
981 }
982
983#if defined(PETSC_HAVE_SAWS)
984 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS"saws",&issaws);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),984,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
985#endif
986 if (iascii) {
987 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ORDER,"Must call MatAssemblyBegin/End() before viewing matrix")return PetscError(PetscObjectComm((PetscObject)mat),987,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",58,PETSC_ERROR_INITIAL
,"Must call MatAssemblyBegin/End() before viewing matrix")
;
988 ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mat,viewer);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),988,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
989 if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
990 MatNullSpace nullsp,transnullsp;
991
992 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),992,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
993   ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
994   ierr = MatGetBlockSizes(mat,&rbs,&cbs);CHKERRQ(ierr);
995   if (rbs != 1 || cbs != 1) {
996     if (rbs != cbs) {ierr = PetscViewerASCIIPrintf(viewer,"rows=%D, cols=%D, rbs=%D, cbs = %D\n",rows,cols,rbs,cbs);CHKERRQ(ierr);}
997     else {ierr = PetscViewerASCIIPrintf(viewer,"rows=%D, cols=%D, bs=%D\n",rows,cols,rbs);CHKERRQ(ierr);}
998   } else {
999     ierr = PetscViewerASCIIPrintf(viewer,"rows=%D, cols=%D\n",rows,cols);CHKERRQ(ierr);
1000  }
1001  if (mat->factortype) {
1002    MatSolverType solver;
1003    ierr = MatFactorGetSolverType(mat,&solver);CHKERRQ(ierr);
1004    ierr = PetscViewerASCIIPrintf(viewer,"package used to perform factorization: %s\n",solver);CHKERRQ(ierr);
1005  }
1006  if (mat->ops->getinfo) {
1007    MatInfo info;
1008    ierr = MatGetInfo(mat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr);
1009    ierr = PetscViewerASCIIPrintf(viewer,"total: nonzeros=%.f, allocated nonzeros=%.f\n",info.nz_used,info.nz_allocated);CHKERRQ(ierr);
1010    ierr = PetscViewerASCIIPrintf(viewer,"total number of mallocs used during MatSetValues calls =%D\n",(PetscInt)info.mallocs);CHKERRQ(ierr);
1011  }
1012  ierr = MatGetNullSpace(mat,&nullsp);CHKERRQ(ierr);
1013  ierr = MatGetTransposeNullSpace(mat,&transnullsp);CHKERRQ(ierr);
1014  if (nullsp) {ierr = PetscViewerASCIIPrintf(viewer," has attached null space\n");CHKERRQ(ierr);}
1015  if (transnullsp && transnullsp != nullsp) {ierr = PetscViewerASCIIPrintf(viewer," has attached transposed null space\n");CHKERRQ(ierr);}
1016  ierr = MatGetNearNullSpace(mat,&nullsp);CHKERRQ(ierr);
1017  if (nullsp) {ierr = PetscViewerASCIIPrintf(viewer," has attached near null space\n");CHKERRQ(ierr);}
1018  }
1019#if defined(PETSC_HAVE_SAWS)
1020 } else if (issaws) {
1021 PetscMPIInt rank;
1022
1023    ierr = PetscObjectName((PetscObject)mat);CHKERRQ(ierr);
1024    ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
1025    if (!((PetscObject)mat)->amsmem && !rank) {
1026      ierr = PetscObjectViewSAWs((PetscObject)mat,viewer);CHKERRQ(ierr);
1027    }
1028#endif
1029  } else if (isstring) {
1030    const char *type;
1031    ierr = MatGetType(mat,&type);CHKERRQ(ierr);
1032    ierr = PetscViewerStringSPrintf(viewer," MatType: %-7.7s",type);CHKERRQ(ierr);
1033    if (mat->ops->view) {ierr = (*mat->ops->view)(mat,viewer);CHKERRQ(ierr);}
1034  }
1035  if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1036    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1037    ierr = (*mat->ops->viewnative)(mat,viewer);CHKERRQ(ierr);
1038    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1039  } else if (mat->ops->view) {
1040    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
1041    ierr = (*mat->ops->view)(mat,viewer);CHKERRQ(ierr);
1042    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1043  }
1044  if (iascii) {
1045    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
1046    if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1047      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
1048    }
1049  }
1050  ierr = PetscLogEventEnd(MAT_View,mat,viewer,0,0);CHKERRQ(ierr);
1051  PetscFunctionReturn(0);
1052}
1053
1054#if defined(PETSC_USE_DEBUG)
1055#include <../src/sys/totalview/tv_data_display.h>
1056PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1057{
1058 TV_add_row("Local rows", "int", &mat->rmap->n);
1059 TV_add_row("Local columns", "int", &mat->cmap->n);
1060 TV_add_row("Global rows", "int", &mat->rmap->N);
1061 TV_add_row("Global columns", "int", &mat->cmap->N);
1062 TV_add_row("Typename", TV_ascii_string_type"$string", ((PetscObject)mat)->type_name);
1063 return TV_format_OK;
1064}
1065#endif
1066
1067/*@C
1068 MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1069 with MatView(). The matrix format is determined from the options database.
1070 Generates a parallel MPI matrix if the communicator has more than one
1071 processor. The default matrix type is AIJ.
1072
1073 Collective on PetscViewer
1074
1075 Input Parameters:
1076+ newmat - the newly loaded matrix; this needs to have been created with MatCreate()
1077          or some related function before a call to MatLoad()
1078- viewer - binary/HDF5 file viewer
1079
1080 Options Database Keys:
1081. -matload_block_size <bs> - used with block matrix formats
1082   (MATSEQBAIJ, ...) to specify the block size
1083
1084
1085 Level: beginner
1086
1087 Notes:
1088 If the Mat type has not yet been given then MATAIJ is used; call MatSetFromOptions() on the
1089 Mat before calling this routine if you wish to set it from the options database.
1090
1091 MatLoad() automatically loads into the options database any options
1092 given in the file filename.info, where filename is the name of the file
1093 that was passed to PetscViewerBinaryOpen(). The options in the info
1094 file will be ignored if you use the -viewer_binary_skip_info option.
1095
1096 If the type or size of newmat is not set before a call to MatLoad(), PETSc
1097 sets the default matrix type AIJ and sets the local and global sizes.
1098 If the type and/or size is already set, then it is used unchanged.
1099
1100 In parallel, each processor can load a subset of rows (or the
1101 entire matrix). This routine is especially useful when a large
1102 matrix is stored on disk and only part of it is desired on each
1103 processor. For example, a parallel solver may access only some of
1104 the rows from each processor. The algorithm used here reads
1105 relatively small blocks of data rather than reading the entire
1106 matrix and then subsetting it.
1107
1108 The viewer's PetscViewerType must be either PETSCVIEWERBINARY or PETSCVIEWERHDF5.
1109 Such a viewer can be created using PetscViewerBinaryOpen()/PetscViewerHDF5Open(),
1110 or with a sequence like
1111$ PetscViewer v;
1112$ PetscViewerCreate(PETSC_COMM_WORLD,&v);
1113$ PetscViewerSetType(v,PETSCVIEWERBINARY);
1114$ PetscViewerSetFromOptions(v);
1115$ PetscViewerFileSetMode(v,FILE_MODE_READ);
1116$ PetscViewerFileSetName(v,"datafile");
1117 The optional PetscViewerSetFromOptions() call allows overriding PetscViewerSetType() using the option
1118$ -viewer_type {binary,hdf5}
1119
1120 See the example src/ksp/ksp/examples/tutorials/ex27.c for the first approach,
1121 and src/mat/examples/tutorials/ex10.c for the second approach.
1122
1123 Notes about the PETSc binary format:
1124 In case of PETSCVIEWERBINARY, a native PETSc binary format is used. Each of the blocks
1125 is read onto rank 0 and then shipped to its destination rank, one after another.
1126 Multiple objects, both matrices and vectors, can be stored within the same file.
1127 Their PetscObject name is ignored; they are loaded in the order of their storage.
1128
1129 Most users should not need to know the details of the binary storage
1130 format, since MatLoad() and MatView() completely hide these details.
1131 But for anyone who's interested, the standard binary matrix storage
1132 format is
1133
1134$ int MAT_FILE_CLASSID
1135$ int number of rows
1136$ int number of columns
1137$ int total number of nonzeros
1138$ int *number nonzeros in each row
1139$ int *column indices of all nonzeros (starting index is zero)
1140$ PetscScalar *values of all nonzeros
1141
1142 PETSc automatically does the byte swapping on machines that store the bytes
1143 reversed, e.g. DEC Alpha, FreeBSD, Linux, Windows, and the Paragon; thus if you
1144 write your own binary read/write routines you have to swap the bytes; see
1145 PetscBinaryRead() and PetscBinaryWrite() to see how this may be done.
1147
1148 Notes about the HDF5 (MATLAB MAT-File Version 7.3) format:
1149 In case of PETSCVIEWERHDF5, a parallel HDF5 reader is used.
1150 Each processor's chunk is loaded independently by its owning rank.
1151 Multiple objects, both matrices and vectors, can be stored within the same file.
1152 They are looked up by their PetscObject name.
1153
1154 As the MATLAB MAT-File Version 7.3 format is also an HDF5 flavor, we decided to use
1155 by default the same structure and naming of the AIJ arrays and column count
1156 within the HDF5 file. This means that a MAT file saved with the -v7.3 flag, e.g.
1157$ save example.mat A b -v7.3
1158 can be directly read by this routine (see Reference 1 for details).
1159 Note that depending on your MATLAB version, this format might be the default;
1160 otherwise you can set it as the default in Preferences.
1161
1162 Unless the -nocompression flag is used to save the file in MATLAB,
1163 PETSc must be configured with the ZLIB package.
1164
1165 See also examples src/mat/examples/tutorials/ex10.c and src/ksp/ksp/examples/tutorials/ex27.c
1166
1167 Current HDF5 (MAT-File) limitations:
1168 This reader currently supports only real MATSEQAIJ, MATMPIAIJ, MATSEQDENSE and MATMPIDENSE matrices.
1169
1170 The corresponding MatView() is not yet implemented.
1171
1172 The loaded matrix is actually the transpose of the original one in MATLAB,
1173 unless you push the PETSC_VIEWER_HDF5_MAT format (see the examples above).
1174 With this format, the matrix is automatically transposed by PETSc,
1175 unless the matrix is marked as SPD or symmetric
1176 (see MatSetOption(), MAT_SPD, MAT_SYMMETRIC).
1177
1178 References:
1179 1. MATLAB(R) Documentation, manual page of save(), https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version
1180
1181.seealso: PetscViewerBinaryOpen(), PetscViewerSetType(), MatView(), VecLoad()
1182
1183 @*/
1184PetscErrorCode MatLoad(Mat newmat,PetscViewer viewer)
1185{
1186 PetscErrorCode ierr;
1187 PetscBool flg;
1188
1189  PetscFunctionBegin;
1190  PetscValidHeaderSpecific(newmat,MAT_CLASSID,1);
1191  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
1192
1193  if (!((PetscObject)newmat)->type_name) {
1194    ierr = MatSetType(newmat,MATAIJ);CHKERRQ(ierr);
1195  }
1196
1197  flg  = PETSC_FALSE;
1198  ierr = PetscOptionsGetBool(((PetscObject)newmat)->options,((PetscObject)newmat)->prefix,"-matload_symmetric",&flg,NULL);CHKERRQ(ierr);
1199  if (flg) {
1200    ierr = MatSetOption(newmat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
1201    ierr = MatSetOption(newmat,MAT_SYMMETRY_ETERNAL,PETSC_TRUE);CHKERRQ(ierr);
1202  }
1203  flg  = PETSC_FALSE;
1204  ierr = PetscOptionsGetBool(((PetscObject)newmat)->options,((PetscObject)newmat)->prefix,"-matload_spd",&flg,NULL);CHKERRQ(ierr);
1205  if (flg) {
1206    ierr = MatSetOption(newmat,MAT_SPD,PETSC_TRUE);CHKERRQ(ierr);
1207  }
1208
1209  if (!newmat->ops->load) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatLoad is not supported for type");
1210  ierr = PetscLogEventBegin(MAT_Load,viewer,0,0,0);CHKERRQ(ierr);
1211  ierr = (*newmat->ops->load)(newmat,viewer);CHKERRQ(ierr);
1212  ierr = PetscLogEventEnd(MAT_Load,viewer,0,0,0);CHKERRQ(ierr);
1213  PetscFunctionReturn(0);
1214}
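
/* Illustrative usage sketch (not part of matrix.c): loading a matrix from a PETSc
   binary file, following the MatLoad() notes above. The file name "matrix.dat" and
   the function name are assumptions for illustration only. */
/*
static PetscErrorCode LoadMatrixExample(MPI_Comm comm,Mat *A)
{
  PetscErrorCode ierr;
  PetscViewer    viewer;

  ierr = PetscViewerBinaryOpen(comm,"matrix.dat",FILE_MODE_READ,&viewer);CHKERRQ(ierr);
  ierr = MatCreate(comm,A);CHKERRQ(ierr);            // type defaults to MATAIJ inside MatLoad()
  ierr = MatLoad(*A,viewer);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  return 0;
}
*/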
1215
1216PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1217{
1218 PetscErrorCode ierr;
1219 Mat_Redundant *redund = *redundant;
1220 PetscInt i;
1221
1222  PetscFunctionBegin;
1223  if (redund) {
1224    if (redund->matseq) { /* via MatCreateSubMatrices() */
1225      ierr = ISDestroy(&redund->isrow);CHKERRQ(ierr);
1226      ierr = ISDestroy(&redund->iscol);CHKERRQ(ierr);
1227      ierr = MatDestroySubMatrices(1,&redund->matseq);CHKERRQ(ierr);
1228    } else {
1229      ierr = PetscFree2(redund->send_rank,redund->recv_rank);CHKERRQ(ierr);
1230      ierr = PetscFree(redund->sbuf_j);CHKERRQ(ierr);
1231      ierr = PetscFree(redund->sbuf_a);CHKERRQ(ierr);
1232      for (i=0; i<redund->nrecvs; i++) {
1233        ierr = PetscFree(redund->rbuf_j[i]);CHKERRQ(ierr);
1234        ierr = PetscFree(redund->rbuf_a[i]);CHKERRQ(ierr);
1235      }
1236      ierr = PetscFree4(redund->sbuf_nz,redund->rbuf_nz,redund->rbuf_j,redund->rbuf_a);CHKERRQ(ierr);
1237    }
1238
1239    if (redund->subcomm) {
1240      ierr = PetscCommDestroy(&redund->subcomm);CHKERRQ(ierr);
1241    }
1242    ierr = PetscFree(redund);CHKERRQ(ierr);
1243  }
1244  PetscFunctionReturn(0);
1245}
1246
1247/*@
1248 MatDestroy - Frees space taken by a matrix.
1249
1250 Collective on Mat
1251
1252 Input Parameter:
1253. A - the matrix
1254
1255 Level: beginner
1256
1257@*/
1258PetscErrorCode MatDestroy(Mat *A)
1259{
1260 PetscErrorCode ierr;
1261
1262  PetscFunctionBegin;
1263  if (!*A) PetscFunctionReturn(0);
1264  PetscValidHeaderSpecific(*A,MAT_CLASSID,1);
1265  if (--((PetscObject)(*A))->refct > 0) {*A = NULL; PetscFunctionReturn(0);}
1266
1267  /* if memory was published with SAWs then destroy it */
1268  ierr = PetscObjectSAWsViewOff((PetscObject)*A);CHKERRQ(ierr);
1269  if ((*A)->ops->destroy) {
1270    ierr = (*(*A)->ops->destroy)(*A);CHKERRQ(ierr);
1271  }
1272
1273  ierr = PetscFree((*A)->defaultvectype);CHKERRQ(ierr);
1274  ierr = PetscFree((*A)->bsizes);CHKERRQ(ierr);
1275  ierr = PetscFree((*A)->solvertype);CHKERRQ(ierr);
1276  ierr = MatDestroy_Redundant(&(*A)->redundant);CHKERRQ(ierr);
1277  ierr = MatNullSpaceDestroy(&(*A)->nullsp);CHKERRQ(ierr);
1278  ierr = MatNullSpaceDestroy(&(*A)->transnullsp);CHKERRQ(ierr);
1279  ierr = MatNullSpaceDestroy(&(*A)->nearnullsp);CHKERRQ(ierr);
1280  ierr = MatDestroy(&(*A)->schur);CHKERRQ(ierr);
1281  ierr = PetscLayoutDestroy(&(*A)->rmap);CHKERRQ(ierr);
1282  ierr = PetscLayoutDestroy(&(*A)->cmap);CHKERRQ(ierr);
1283  ierr = PetscHeaderDestroy(A);CHKERRQ(ierr);
1284  PetscFunctionReturn(0);
1285}
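
/* Illustrative sketch (not part of matrix.c): MatDestroy() only frees the matrix once
   the PETSc reference count reaches zero, as the refct check above shows. The names A
   and B are assumptions for illustration only. */
/*
  Mat B = A;
  ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);  // reference count is now 2
  ierr = MatDestroy(&A);CHKERRQ(ierr);  // decrements the count and sets A to NULL; memory kept
  ierr = MatDestroy(&B);CHKERRQ(ierr);  // count reaches 0; the matrix is actually freed
*/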
1286
1287/*@C
1288 MatSetValues - Inserts or adds a block of values into a matrix.
1289 These values may be cached, so MatAssemblyBegin() and MatAssemblyEnd()
1290 MUST be called after all calls to MatSetValues() have been completed.
1291
1292 Not Collective
1293
1294 Input Parameters:
1295+ mat - the matrix
1296. v - a logically two-dimensional array of values
1297. m, idxm - the number of rows and their global indices
1298. n, idxn - the number of columns and their global indices
1299- addv - either ADD_VALUES or INSERT_VALUES, where
1300 ADD_VALUES adds values to any existing entries, and
1301 INSERT_VALUES replaces existing entries with new values
1302
1303 Notes:
1304 If you create the matrix yourself (that is, not with a call to DMCreateMatrix()) then you MUST call MatXXXXSetPreallocation() or
1305 MatSetUp() before using this routine.
1306
1307 By default the values, v, are row-oriented. See MatSetOption() for other options.
1308
1309 Calls to MatSetValues() with the INSERT_VALUES and ADD_VALUES
1310 options cannot be mixed without intervening calls to the assembly
1311 routines.
1312
1313 MatSetValues() uses 0-based row and column numbers in Fortran
1314 as well as in C.
1315
1316 Negative indices may be passed in idxm and idxn; these rows and columns are
1317 simply ignored. This allows easily inserting element stiffness matrices
1318 with homogeneous Dirichlet boundary conditions that you don't want represented
1319 in the matrix.
1320
1321 Efficiency Alert:
1322 The routine MatSetValuesBlocked() may offer much better efficiency
1323 for users of block sparse formats (MATSEQBAIJ and MATMPIBAIJ).
1324
1325 Level: beginner
1326
1327 Developer Notes:
1328 This is labeled with C so it does not automatically generate Fortran stubs and interfaces,
1329 because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1330
1331.seealso: MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValuesBlocked(), MatSetValuesLocal(),
1332 InsertMode, INSERT_VALUES, ADD_VALUES
1333@*/
1334PetscErrorCode MatSetValues(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],const PetscScalar v[],InsertMode addv)
1335{
1336 PetscErrorCode ierr;
1337#if defined(PETSC_USE_DEBUG)
1338  PetscInt       i,j;
1339#endif
1340
1341  PetscFunctionBeginHot;
1342  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1343  PetscValidType(mat,1);
1344  if (!m || !n) PetscFunctionReturn(0); /* no values to insert */
1345  PetscValidIntPointer(idxm,3);
1346  PetscValidIntPointer(idxn,5);
1347  MatCheckPreallocated(mat,1);
1348
1349  if (mat->insertmode == NOT_SET_VALUES) {
1350    mat->insertmode = addv;
1351  }
1352#if defined(PETSC_USE_DEBUG)
1353  else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
1354  if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
1355  if (!mat->ops->setvalues) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
1356
1357  for (i=0; i<m; i++) {
1358    for (j=0; j<n; j++) {
1359      if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i*n+j]))
1360#if defined(PETSC_USE_COMPLEX)
1361        SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_FP,"Inserting %g+ig at matrix entry (%D,%D)",(double)PetscRealPart(v[i*n+j]),(double)PetscImaginaryPart(v[i*n+j]),idxm[i],idxn[j]);
1362#else
1363        SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_FP,"Inserting %g at matrix entry (%D,%D)",(double)v[i*n+j],idxm[i],idxn[j]);
1364#endif
1365    }
1366  }
1367#endif
1368
1369  if (mat->assembled) {
1370    mat->was_assembled = PETSC_TRUE;
1371    mat->assembled     = PETSC_FALSE;
1372  }
1373  ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
1374  ierr = (*mat->ops->setvalues)(mat,m,idxm,n,idxn,v,addv);CHKERRQ(ierr);
1375  ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
1376  PetscFunctionReturn(0);
1377}
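
/* Illustrative sketch (not part of matrix.c): the assembly sequence required by the
   MatSetValues() notes above -- set up/preallocate, insert values, then call the
   assembly routines. All names are assumptions for illustration only. */
/*
static PetscErrorCode SetValuesExample(void)
{
  PetscErrorCode ierr;
  Mat            A;
  PetscInt       idxm[2] = {0,1},idxn[2] = {0,1};
  PetscScalar    v[4]    = {1.0,2.0,3.0,4.0};  // row-oriented: v[i*n+j] lands at (idxm[i],idxn[j])

  ierr = MatCreate(PETSC_COMM_SELF,&A);CHKERRQ(ierr);
  ierr = MatSetSizes(A,2,2,2,2);CHKERRQ(ierr);
  ierr = MatSetUp(A);CHKERRQ(ierr);                  // or a MatXXXXSetPreallocation() routine
  ierr = MatSetValues(A,2,idxm,2,idxn,v,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  return 0;
}
*/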
1378
1379
1380/*@
1381 MatSetValuesRowLocal - Inserts a row (block row for BAIJ matrices) of nonzero
1382 values into a matrix
1383
1384 Not Collective
1385
1386 Input Parameters:
1387+ mat - the matrix
1388. row - the (block) row to set
1389- v - a logically two-dimensional array of values
1390
1391 Notes:
1392 By default the values, v, are column-oriented (for the block version) and sorted.
1393
1394 All the nonzeros in the row must be provided
1395
1396 The matrix must have previously had its column indices set
1397
1398 The row must belong to this process
1399
1400 Level: intermediate
1401
1402.seealso: MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValuesBlocked(), MatSetValuesLocal(),
1403 InsertMode, INSERT_VALUES, ADD_VALUES, MatSetValues(), MatSetValuesRow(), MatSetLocalToGlobalMapping()
1404@*/
1405PetscErrorCode MatSetValuesRowLocal(Mat mat,PetscInt row,const PetscScalar v[])
1406{
1407 PetscErrorCode ierr;
1408 PetscInt globalrow;
1409
1410  PetscFunctionBegin;
1411  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1412  PetscValidType(mat,1);
1413  PetscValidScalarPointer(v,2);
1414  ierr = ISLocalToGlobalMappingApply(mat->rmap->mapping,1,&row,&globalrow);CHKERRQ(ierr);
1415  ierr = MatSetValuesRow(mat,globalrow,v);CHKERRQ(ierr);
1416  PetscFunctionReturn(0);
1417}
1418
1419/*@
1420 MatSetValuesRow - Inserts a row (block row for BAIJ matrices) of nonzero
1421 values into a matrix
1422
1423 Not Collective
1424
1425 Input Parameters:
1426+ mat - the matrix
1427. row - the (block) row to set
1428- v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1429
1430 Notes:
1431 The values, v, are column-oriented for the block version.
1432
1433 All the nonzeros in the row must be provided
1434
1435 THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED; usually MatSetValues() is used.
1436
1437 The row must belong to this process
1438
1439 Level: advanced
1440
1441.seealso: MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValuesBlocked(), MatSetValuesLocal(),
1442 InsertMode, INSERT_VALUES, ADD_VALUES, MatSetValues()
1443@*/
1444PetscErrorCode MatSetValuesRow(Mat mat,PetscInt row,const PetscScalar v[])
1445{
1446 PetscErrorCode ierr;
1447
1448  PetscFunctionBeginHot;
1449  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1450  PetscValidType(mat,1);
1451  MatCheckPreallocated(mat,1);
1452  PetscValidScalarPointer(v,2);
1453#if defined(PETSC_USE_DEBUG)
1454  if (mat->insertmode == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add and insert values");
1455  if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
1456#endif
1457  mat->insertmode = INSERT_VALUES;
1458
1459  if (mat->assembled) {
1460    mat->was_assembled = PETSC_TRUE;
1461    mat->assembled     = PETSC_FALSE;
1462  }
1463  ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
1464  if (!mat->ops->setvaluesrow) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
1465  ierr = (*mat->ops->setvaluesrow)(mat,row,v);CHKERRQ(ierr);
1466  ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
1467  PetscFunctionReturn(0);
1468}
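
/* Illustrative sketch (not part of matrix.c): replacing all nonzero values of one row
   of a matrix whose column-index structure is already set, per the notes above. The
   CSR arrays and all names are assumptions for illustration only. */
/*
  PetscInt    ia[3] = {0,2,4},ja[4] = {0,1,0,1};  // 2x2 CSR structure, dense pattern
  PetscScalar a[4]  = {1,2,3,4},newrow[2] = {5,6};
  Mat         A;

  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,2,2,ia,ja,a,&A);CHKERRQ(ierr);
  ierr = MatSetValuesRow(A,0,newrow);CHKERRQ(ierr);  // overwrites every nonzero of row 0
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
*/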
1469
1470/*@
1471 MatSetValuesStencil - Inserts or adds a block of values into a matrix,
1472 using structured grid indexing.
1473
1474 Not Collective
1475
1476 Input Parameters:
1477+ mat - the matrix
1478. m - number of rows being entered
1479. idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1480. n - number of columns being entered
1481. idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1482. v - a logically two-dimensional array of values
1483- addv - either ADD_VALUES or INSERT_VALUES, where
1484 ADD_VALUES adds values to any existing entries, and
1485 INSERT_VALUES replaces existing entries with new values
1486
1487 Notes:
1488 By default the values, v, are row-oriented. See MatSetOption() for other options.
1489
1490 Calls to MatSetValuesStencil() with the INSERT_VALUES and ADD_VALUES
1491 options cannot be mixed without intervening calls to the assembly
1492 routines.
1493
1494 The grid coordinates are across the entire grid, not just the local portion
1495
1496 MatSetValuesStencil() uses 0-based row and column numbers in Fortran
1497 as well as in C.
1498
1499 For setting/accessing vector values via array coordinates you can use the DMDAVecGetArray() routine
1500
1501 In order to use this routine you must either obtain the matrix with DMCreateMatrix()
1502 or call MatSetLocalToGlobalMapping() and MatSetStencil() first.
1503
1504 The columns and rows in the stencil passed in MUST be contained within the
1505 ghost region of the given process as set with DMDACreateXXX() or MatSetStencil(). For example,
1506 if you create a DMDA with an overlap of one grid level and on a particular process its first
1507 local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1508 first i index you can use in your column and row indices in MatSetStencil() is 5.
1509
1510 In Fortran idxm and idxn should be declared as
1511$ MatStencil idxm(4,m),idxn(4,n)
1512 and the values inserted using
1513$ idxm(MatStencil_i,1) = i
1514$ idxm(MatStencil_j,1) = j
1515$ idxm(MatStencil_k,1) = k
1516$ idxm(MatStencil_c,1) = c
1517 etc
1518
1519 For periodic boundary conditions use negative indices for values to the left (below 0), which are
1520 obtained by wrapping values from the right edge. For values to the right of the last entry, use that
1521 index plus one, etc., to obtain values obtained by wrapping from the left edge. This only works for the
1522 DM_BOUNDARY_PERIODIC boundary type.
1523
1524 For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
1525 a single value per point) you can skip filling those indices.
1526
1527 Inspired by the structured grid interface to the HYPRE package
1528 (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1529
1530 Efficiency Alert:
1531 The routine MatSetValuesBlockedStencil() may offer much better efficiency
1532 for users of block sparse formats (MATSEQBAIJ and MATMPIBAIJ).
1533
1534 Level: beginner
1535
1536.seealso: MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValuesBlocked(), MatSetValuesLocal()
1537 MatSetValues(), MatSetValuesBlockedStencil(), MatSetStencil(), DMCreateMatrix(), DMDAVecGetArray(), MatStencil
1538@*/
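
/* Illustrative sketch (not part of matrix.c): a 2d 5-point stencil row inserted with
   MatSetValuesStencil() on a DMDA-created matrix, per the notes above. The names J, i,
   j and the stencil weights are assumptions for illustration only. */
/*
  MatStencil  row,col[5];
  PetscScalar v[5];
  row.i = i; row.j = j;
  col[0].i = i;   col[0].j = j;   v[0] =  4.0;
  col[1].i = i-1; col[1].j = j;   v[1] = -1.0;
  col[2].i = i+1; col[2].j = j;   v[2] = -1.0;
  col[3].i = i;   col[3].j = j-1; v[3] = -1.0;
  col[4].i = i;   col[4].j = j+1; v[4] = -1.0;
  ierr = MatSetValuesStencil(J,1,&row,5,col,v,INSERT_VALUES);CHKERRQ(ierr);
*/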
1539PetscErrorCode MatSetValuesStencil(Mat mat,PetscInt m,const MatStencil idxm[],PetscInt n,const MatStencil idxn[],const PetscScalar v[],InsertMode addv)
1540{
1541 PetscErrorCode ierr;
1542 PetscInt buf[8192],*bufm=0,*bufn=0,*jdxm,*jdxn;
1543 PetscInt j,i,dim = mat->stencil.dim,*dims = mat->stencil.dims+1,tmp;
1544 PetscInt *starts = mat->stencil.starts,*dxm = (PetscInt*)idxm,*dxn = (PetscInt*)idxn,sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1545
1546 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 1546; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1547 if (!m || !n) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
; /* no values to insert */
1548 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1549 PetscValidType(mat,1);
1550 PetscValidIntPointer(idxm,3);
1551 PetscValidIntPointer(idxn,5);
1552
1553 if ((m+n) <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
1554 jdxm = buf; jdxn = buf+m;
1555 } else {
1556 ierr = PetscMalloc2(m,&bufm,n,&bufn);CHKERRQ(ierr);
1557 jdxm = bufm; jdxn = bufn;
1558 }
1559 for (i=0; i<m; i++) { /* convert the row stencil coordinates into local linear row indices */
1560 for (j=0; j<3-sdim; j++) dxm++;
1561 tmp = *dxm++ - starts[0];
1562 for (j=0; j<dim-1; j++) {
1563 if ((*dxm++ - starts[j+1]) < 0 || tmp < 0) tmp = -1;
1564 else tmp = tmp*dims[j] + *(dxm-1) - starts[j+1];
1565 }
1566 if (mat->stencil.noc) dxm++;
1567 jdxm[i] = tmp;
1568 }
1569 for (i=0; i<n; i++) { /* same conversion for the column stencil coordinates */
1570 for (j=0; j<3-sdim; j++) dxn++;
1571 tmp = *dxn++ - starts[0];
1572 for (j=0; j<dim-1; j++) {
1573 if ((*dxn++ - starts[j+1]) < 0 || tmp < 0) tmp = -1;
1574 else tmp = tmp*dims[j] + *(dxn-1) - starts[j+1];
1575 }
1576 if (mat->stencil.noc) dxn++;
1577 jdxn[i] = tmp;
1578 }
1579 ierr = MatSetValuesLocal(mat,m,jdxm,n,jdxn,v,addv);CHKERRQ(ierr);
1580 ierr = PetscFree2(bufm,bufn);CHKERRQ(ierr);
1581 PetscFunctionReturn(0);
1582}
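
 A minimal usage sketch (not from the original source; da is an assumed DMDA and J is assumed to come from DMCreateMatrix(da,&J); boundary rows are omitted for brevity): assembling a 2d 5-point Laplacian with MatSetValuesStencil().

    PetscErrorCode ierr;
    DMDALocalInfo  info;
    MatStencil     row,col[5];
    PetscScalar    v[5];
    PetscInt       i,j;

    ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
    for (j=info.ys; j<info.ys+info.ym; j++) {
      for (i=info.xs; i<info.xs+info.xm; i++) {
        row.i = i; row.j = j;
        col[0].i = i;   col[0].j = j;   v[0] =  4.0;  /* diagonal */
        col[1].i = i-1; col[1].j = j;   v[1] = -1.0;  /* west     */
        col[2].i = i+1; col[2].j = j;   v[2] = -1.0;  /* east     */
        col[3].i = i;   col[3].j = j-1; v[3] = -1.0;  /* south    */
        col[4].i = i;   col[4].j = j+1; v[4] = -1.0;  /* north    */
        ierr = MatSetValuesStencil(J,1,&row,5,col,v,INSERT_VALUES);CHKERRQ(ierr);
      }
    }
    ierr = MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);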
1583
1584/*@
1585 MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix,
1586 using structured grid indexing.
1587
1588 Not Collective
1589
1590 Input Parameters:
1591+ mat - the matrix
1592. m - number of rows being entered
1593. idxm - grid coordinates for matrix rows being entered
1594. n - number of columns being entered
1595. idxn - grid coordinates for matrix columns being entered
1596. v - a logically two-dimensional array of values
1597- addv - either ADD_VALUES or INSERT_VALUES, where
1598 ADD_VALUES adds values to any existing entries, and
1599 INSERT_VALUES replaces existing entries with new values
1600
1601 Notes:
1602 By default the values, v, are row-oriented and unsorted.
1603 See MatSetOption() for other options.
1604
1605 Calls to MatSetValuesBlockedStencil() with the INSERT_VALUES and ADD_VALUES
1606 options cannot be mixed without intervening calls to the assembly
1607 routines.
1608
1609 The grid coordinates are across the entire grid, not just the local portion
1610
1611 MatSetValuesBlockedStencil() uses 0-based row and column numbers in Fortran
1612 as well as in C.
1613
1614 For setting/accessing vector values via array coordinates you can use the DMDAVecGetArray() routine
1615
1616 In order to use this routine you must either obtain the matrix with DMCreateMatrix()
1617 or call MatSetBlockSize(), MatSetLocalToGlobalMapping() and MatSetStencil() first.
1618
1619 The columns and rows in the stencil passed in MUST be contained within the
1620 ghost region of the given process as set with DMDACreateXXX() or MatSetStencil(). For example,
1621 if you create a DMDA with an overlap of one grid level and on a particular process its first
1622 local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1623 first i index you can use in your column and row indices in MatSetStencil() is 5.
1624
1625 In Fortran idxm and idxn should be declared as
1626$ MatStencil idxm(4,m),idxn(4,n)
1627 and the values inserted using
1628$ idxm(MatStencil_i,1) = i
1629$ idxm(MatStencil_j,1) = j
1630$ idxm(MatStencil_k,1) = k
1631 etc
1632
1633 Negative indices may be passed in idxm and idxn; these rows and columns are
1634 simply ignored. This allows easy insertion of element stiffness matrices
1635 with homogeneous Dirichlet boundary conditions that you don't want represented
1636 in the matrix.
1637
1638 Inspired by the structured grid interface to the HYPRE package
1639 (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1640
1641 Level: beginner
1642
1643.seealso: MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValuesBlocked(), MatSetValuesLocal()
1644 MatSetValues(), MatSetValuesStencil(), MatSetStencil(), DMCreateMatrix(), DMDAVecGetArray(), MatStencil,
1645 MatSetBlockSize(), MatSetLocalToGlobalMapping()
1646@*/
1647PetscErrorCode MatSetValuesBlockedStencil(Mat mat,PetscInt m,const MatStencil idxm[],PetscInt n,const MatStencil idxn[],const PetscScalar v[],InsertMode addv)
1648{
1649 PetscErrorCode ierr;
1650 PetscInt buf[8192],*bufm=0,*bufn=0,*jdxm,*jdxn;
1651 PetscInt j,i,dim = mat->stencil.dim,*dims = mat->stencil.dims+1,tmp;
1652 PetscInt *starts = mat->stencil.starts,*dxm = (PetscInt*)idxm,*dxn = (PetscInt*)idxn,sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1653
1654 PetscFunctionBegin;
1655 if (!m || !n) PetscFunctionReturn(0); /* no values to insert */
1656 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1657 PetscValidType(mat,1);
1658 PetscValidIntPointer(idxm,3);
1659 PetscValidIntPointer(idxn,5);
1660 PetscValidScalarPointer(v,6);
1661
1662 if ((m+n) <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
1663 jdxm = buf; jdxn = buf+m;
1664 } else {
1665 ierr = PetscMalloc2(m,&bufm,n,&bufn);CHKERRQ(ierr);
1666 jdxm = bufm; jdxn = bufn;
1667 }
1668 for (i=0; i<m; i++) {
1669 for (j=0; j<3-sdim; j++) dxm++;
1670 tmp = *dxm++ - starts[0];
1671 for (j=0; j<sdim-1; j++) {
1672 if ((*dxm++ - starts[j+1]) < 0 || tmp < 0) tmp = -1;
1673 else tmp = tmp*dims[j] + *(dxm-1) - starts[j+1];
1674 }
1675 dxm++;
1676 jdxm[i] = tmp;
1677 }
1678 for (i=0; i<n; i++) {
1679 for (j=0; j<3-sdim; j++) dxn++;
1680 tmp = *dxn++ - starts[0];
1681 for (j=0; j<sdim-1; j++) {
1682 if ((*dxn++ - starts[j+1]) < 0 || tmp < 0) tmp = -1;
1683 else tmp = tmp*dims[j] + *(dxn-1) - starts[j+1];
1684 }
1685 dxn++;
1686 jdxn[i] = tmp;
1687 }
1688 ierr = MatSetValuesBlockedLocal(mat,m,jdxm,n,jdxn,v,addv);CHKERRQ(ierr);
1689 ierr = PetscFree2(bufm,bufn);CHKERRQ(ierr);
1690 PetscFunctionReturn(0);
1691}
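
 A minimal sketch of the blocked variant (assumed names, not from the source): with a DMDA created with dof = 2 and J from DMCreateMatrix(), each stencil location receives a full 2x2 block and the c field of MatStencil is left unset; a00..a11 are assumed block coefficients.

    PetscErrorCode ierr;
    MatStencil     row,col;
    PetscScalar    v[4];  /* one bs*bs = 2*2 block, row-oriented */

    row.i = i; row.j = j;
    col.i = i; col.j = j;
    v[0] = a00; v[1] = a01;
    v[2] = a10; v[3] = a11;
    ierr = MatSetValuesBlockedStencil(J,1,&row,1,&col,v,ADD_VALUES);CHKERRQ(ierr);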
1692
1693/*@
1694 MatSetStencil - Sets the grid information for setting values into a matrix via
1695 MatSetValuesStencil()
1696
1697 Not Collective
1698
1699 Input Parameters:
1700+ mat - the matrix
1701. dim - dimension of the grid (1, 2, or 3)
1702. dims - number of grid points in x, y, and z direction, including ghost points on your processor
1703. starts - starting point of ghost nodes on your processor in x, y, and z direction
1704- dof - number of degrees of freedom per node
1705
1706
1707 Inspired by the structured grid interface to the HYPRE package
1708 (www.llnl.gov/CASC/hypre)
1709
1710 For matrices generated with DMCreateMatrix() this routine is automatically called and so not needed by the
1711 user.
1712
1713 Level: beginner
1714
1715.seealso: MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValuesBlocked(), MatSetValuesLocal()
1716 MatSetValues(), MatSetValuesBlockedStencil(), MatSetValuesStencil()
1717@*/
1718PetscErrorCode MatSetStencil(Mat mat,PetscInt dim,const PetscInt dims[],const PetscInt starts[],PetscInt dof)
1719{
1720 PetscInt i;
1721
1722 PetscFunctionBegin;
1723 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1724 PetscValidIntPointer(dims,3);
1725 PetscValidIntPointer(starts,4);
1726
1727 mat->stencil.dim = dim + (dof > 1);
1728 for (i=0; i<dim; i++) {
1729 mat->stencil.dims[i] = dims[dim-i-1]; /* copy the values in backwards */
1730 mat->stencil.starts[i] = starts[dim-i-1];
1731 }
1732 mat->stencil.dims[dim] = dof;
1733 mat->stencil.starts[dim] = 0;
1734 mat->stencil.noc = (PetscBool)(dof == 1);
1735 PetscFunctionReturn(0);
1736}
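
 A minimal sketch (assumed names) of attaching the stencil information by hand, as DMCreateMatrix() would do automatically; gxm/gym and gxs/gys stand for the ghosted local sizes and starting indices in x and y, e.g. as returned by DMDAGetGhostCorners().

    PetscErrorCode ierr;
    PetscInt       dims[2],starts[2];

    dims[0]   = gxm; dims[1]   = gym;  /* ghosted grid points in x and y */
    starts[0] = gxs; starts[1] = gys;  /* first ghost index in x and y   */
    ierr = MatSetStencil(A,2,dims,starts,1);CHKERRQ(ierr);  /* dof = 1 */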
1737
1738/*@C
1739 MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
1740
1741 Not Collective
1742
1743 Input Parameters:
1744+ mat - the matrix
1745. v - a logically two-dimensional array of values
1746. m, idxm - the number of block rows and their global block indices
1747. n, idxn - the number of block columns and their global block indices
1748- addv - either ADD_VALUES or INSERT_VALUES, where
1749 ADD_VALUES adds values to any existing entries, and
1750 INSERT_VALUES replaces existing entries with new values
1751
1752 Notes:
1753 If you create the matrix yourself (that is, not with a call to DMCreateMatrix()) then you MUST call
1754 MatXXXXSetPreallocation() or MatSetUp() before using this routine.
1755
1756 The m and n count the NUMBER of blocks in the row direction and column direction,
1757 NOT the total number of rows/columns; for example, if the block size is 2 and
1758 you are passing in values for rows 2,3,4,5 then m would be 2 (not 4).
1759 The values in idxm would be 1 2; that is, the first index of each block divided by
1760 the block size.
1761
1762 Note that you must call MatSetBlockSize() when constructing this matrix (before
1763 preallocating it).
1764
1765 By default the values, v, are row-oriented, so the layout of
1766 v is the same as for MatSetValues(). See MatSetOption() for other options.
1767
1768 Calls to MatSetValuesBlocked() with the INSERT_VALUES and ADD_VALUES
1769 options cannot be mixed without intervening calls to the assembly
1770 routines.
1771
1772 MatSetValuesBlocked() uses 0-based row and column numbers in Fortran
1773 as well as in C.
1774
1775 Negative indices may be passed in idxm and idxn; these rows and columns are
1776 simply ignored. This allows easy insertion of element stiffness matrices
1777 with homogeneous Dirichlet boundary conditions that you don't want represented
1778 in the matrix.
1779
1780 Each time an entry is set within a sparse matrix via MatSetValues(),
1781 internal searching must be done to determine where to place the
1782 data in the matrix storage space. By instead inserting blocks of
1783 entries via MatSetValuesBlocked(), the overhead of matrix assembly is
1784 reduced.
1785
1786 Example:
1787$ Suppose m=n=2 and block size (bs) = 2. The array is
1788$
1789$ 1 2 | 3 4
1790$ 5 6 | 7 8
1791$ - - - | - - -
1792$ 9 10 | 11 12
1793$ 13 14 | 15 16
1794$
1795$ v[] should be passed in like
1796$ v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
1797$
1798$ If you are not using row oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
1799$ v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
1800
1801 Level: intermediate
1802
1803.seealso: MatSetBlockSize(), MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValues(), MatSetValuesBlockedLocal()
1804@*/
1805PetscErrorCode MatSetValuesBlocked(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],const PetscScalar v[],InsertMode addv)
1806{
1807 PetscErrorCode ierr;
1808
1809 PetscFunctionBeginHot;
1810 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1811 PetscValidType(mat,1);
1812 if (!m || !n) PetscFunctionReturn(0); /* no values to insert */
1813 PetscValidIntPointer(idxm,3);
1814 PetscValidIntPointer(idxn,5);
1815 PetscValidScalarPointer(v,6);
1816 MatCheckPreallocated(mat,1);
1817 if (mat->insertmode == NOT_SET_VALUES) {
1818 mat->insertmode = addv;
1819 }
1820#if defined(PETSC_USE_DEBUG)
1821 else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
1822 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
1823 if (!mat->ops->setvaluesblocked && !mat->ops->setvalues) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
1824#endif
1825
1826 if (mat->assembled) {
1827 mat->was_assembled = PETSC_TRUE;
1828 mat->assembled = PETSC_FALSE;
1829 }
1830 ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
1831 if (mat->ops->setvaluesblocked) {
1832 ierr = (*mat->ops->setvaluesblocked)(mat,m,idxm,n,idxn,v,addv);CHKERRQ(ierr);
1833 } else {
1834 PetscInt buf[8192],*bufr=0,*bufc=0,*iidxm,*iidxn;
1835 PetscInt i,j,bs,cbs;
1836 ierr = MatGetBlockSizes(mat,&bs,&cbs);CHKERRQ(ierr);
1837 if (m*bs+n*cbs <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
1838 iidxm = buf; iidxn = buf + m*bs;
1839 } else {
1840 ierr = PetscMalloc2(m*bs,&bufr,n*cbs,&bufc);CHKERRQ(ierr);
1841 iidxm = bufr; iidxn = bufc;
1842 }
1843 for (i=0; i<m; i++) {
1844 for (j=0; j<bs; j++) {
1845 iidxm[i*bs+j] = bs*idxm[i] + j;
1846 }
1847 }
1848 for (i=0; i<n; i++) {
1849 for (j=0; j<cbs; j++) {
1850 iidxn[i*cbs+j] = cbs*idxn[i] + j;
1851 }
1852 }
1853 ierr = MatSetValues(mat,m*bs,iidxm,n*cbs,iidxn,v,addv);CHKERRQ(ierr);
1854 ierr = PetscFree2(bufr,bufc);CHKERRQ(ierr);
1855 }
1856 ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
1857 PetscFunctionReturn(0);
1858}
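
 A minimal sketch of the example in the notes above (A is an assumed BAIJ matrix with MatSetBlockSize(A,2) called before preallocation): the 4x4 set of entries for point rows/columns 2..5 is inserted as a 2x2 arrangement of 2x2 blocks.

    PetscErrorCode ierr;
    PetscInt       idxm[2] = {1,2},idxn[2] = {1,2};  /* block index = point index / bs */
    PetscScalar    v[16]   = {1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16};

    ierr = MatSetValuesBlocked(A,2,idxm,2,idxn,v,INSERT_VALUES);CHKERRQ(ierr);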
1859
1860/*@
1861 MatGetValues - Gets a block of values from a matrix.
1862
1863 Not Collective; currently only returns a local block
1864
1865 Input Parameters:
1866+ mat - the matrix
1867. v - a logically two-dimensional array for storing the values
1868. m, idxm - the number of rows and their global indices
1869- n, idxn - the number of columns and their global indices
1870
1871 Notes:
1872 The user must allocate space (m*n PetscScalars) for the values, v.
1873 The values, v, are then returned in a row-oriented format,
1874 analogous to that used by default in MatSetValues().
1875
1876 MatGetValues() uses 0-based row and column numbers in
1877 Fortran as well as in C.
1878
1879 MatGetValues() requires that the matrix has been assembled
1880 with MatAssemblyBegin()/MatAssemblyEnd(). Thus, calls to
1881 MatSetValues() and MatGetValues() CANNOT be made in succession
1882 without intermediate matrix assembly.
1883
1884 Negative row or column indices will be ignored and those locations in v[] will be
1885 left unchanged.
1886
1887 Level: advanced
1888
1889.seealso: MatGetRow(), MatCreateSubMatrices(), MatSetValues()
1890@*/
1891PetscErrorCode MatGetValues(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
1892{
1893 PetscErrorCode ierr;
1894
1895 PetscFunctionBegin;
1896 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1897 PetscValidType(mat,1);
1898 if (!m || !n) PetscFunctionReturn(0);
1899 PetscValidIntPointer(idxm,3);
1900 PetscValidIntPointer(idxn,5);
1901 PetscValidScalarPointer(v,6);
1902 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
1903 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
1904 if (!mat->ops->getvalues) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
1905 MatCheckPreallocated(mat,1);
1906
1907 ierr = PetscLogEventBegin(MAT_GetValues,mat,0,0,0);CHKERRQ(ierr);
1908 ierr = (*mat->ops->getvalues)(mat,m,idxm,n,idxn,v);CHKERRQ(ierr);
1909 ierr = PetscLogEventEnd(MAT_GetValues,mat,0,0,0);CHKERRQ(ierr);
1910 PetscFunctionReturn(0);
1911}
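
 A minimal sketch (A is an assumed assembled matrix): fetching a 2x2 set of locally owned entries; per the notes above, assembly must be complete before calling MatGetValues().

    PetscErrorCode ierr;
    PetscInt       rows[2] = {0,1},cols[2] = {0,1};
    PetscScalar    vals[4];  /* row-oriented: vals[i*2+j] receives A(rows[i],cols[j]) */

    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatGetValues(A,2,rows,2,cols,vals);CHKERRQ(ierr);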
1912
1913/*@
1914 MatSetValuesBatch - Adds (ADD_VALUES) many blocks of values into a matrix at once. The blocks must all be square and
1915 the same size. Currently, this can only be called once and creates the given matrix.
1916
1917 Not Collective
1918
1919 Input Parameters:
1920+ mat - the matrix
1921. nb - the number of blocks
1922. bs - the number of rows (and columns) in each block
1923. rows - a concatenation of the rows for each block
1924- v - a concatenation of logically two-dimensional arrays of values
1925
1926 Notes:
1927 In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
1928
1929 Level: advanced
1930
1931.seealso: MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValuesBlocked(), MatSetValuesLocal(),
1932 InsertMode, INSERT_VALUES, ADD_VALUES, MatSetValues()
1933@*/
1934PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
1935{
1936 PetscErrorCode ierr;
1937
1938 PetscFunctionBegin;
1939 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1940 PetscValidType(mat,1);
1941 PetscValidScalarPointer(rows,4);
1942 PetscValidScalarPointer(v,5);
1943#if defined(PETSC_USE_DEBUG)
1944 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
1945#endif
1946
1947 ierr = PetscLogEventBegin(MAT_SetValuesBatch,mat,0,0,0);CHKERRQ(ierr);
1948 if (mat->ops->setvaluesbatch) {
1949 ierr = (*mat->ops->setvaluesbatch)(mat,nb,bs,rows,v);CHKERRQ(ierr);
1950 } else {
1951 PetscInt b;
1952 for (b = 0; b < nb; ++b) {
1953 ierr = MatSetValues(mat, bs, &rows[b*bs], bs, &rows[b*bs], &v[b*bs*bs], ADD_VALUES);CHKERRQ(ierr);
1954 }
1955 }
1956 ierr = PetscLogEventEnd(MAT_SetValuesBatch,mat,0,0,0);CHKERRQ(ierr);
1957 PetscFunctionReturn(0);
1958}
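
 A minimal sketch (assumed data, A an assumed matrix): adding two square bs = 2 blocks in one call; rows concatenates the row indices of both blocks and v concatenates bs*bs values per block, matching the fallback loop above.

    PetscErrorCode ierr;
    PetscInt       rows[4] = {0,1,2,3};         /* block 0 rows, then block 1 rows */
    PetscScalar    v[8]    = {1,2,3,4,5,6,7,8}; /* 4 values per 2x2 block          */

    ierr = MatSetValuesBatch(A,2,2,rows,v);CHKERRQ(ierr);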
1959
1960/*@
1961 MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
1962 the routine MatSetValuesLocal() to allow users to insert matrix entries
1963 using a local (per-processor) numbering.
1964
1965 Not Collective
1966
1967 Input Parameters:
1968+ x - the matrix
1969. rmapping - row mapping created with ISLocalToGlobalMappingCreate() or ISLocalToGlobalMappingCreateIS()
1970- cmapping - column mapping
1971
1972 Level: intermediate
1973
1974
1975.seealso: MatAssemblyBegin(), MatAssemblyEnd(), MatSetValues(), MatSetValuesLocal()
1976@*/
1977PetscErrorCode MatSetLocalToGlobalMapping(Mat x,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
1978{
1979 PetscErrorCode ierr;
1980
1981 PetscFunctionBegin;
1982 PetscValidHeaderSpecific(x,MAT_CLASSID,1);
1983 PetscValidType(x,1);
1984 PetscValidHeaderSpecific(rmapping,IS_LTOGM_CLASSID,2);
1985 PetscValidHeaderSpecific(cmapping,IS_LTOGM_CLASSID,3);
1986
1987 if (x->ops->setlocaltoglobalmapping) {
1988 ierr = (*x->ops->setlocaltoglobalmapping)(x,rmapping,cmapping);CHKERRQ(ierr);
1989 } else {
1990 ierr = PetscLayoutSetISLocalToGlobalMapping(x->rmap,rmapping);CHKERRQ(ierr);
1991 ierr = PetscLayoutSetISLocalToGlobalMapping(x->cmap,cmapping);CHKERRQ(ierr);
1992 }
1993 PetscFunctionReturn(0);
1994}
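
 A minimal sketch (nlocal and globals[] are assumed: the number of local indices and the global index of each local index, including ghosts): building one mapping and attaching it for both rows and columns so MatSetValuesLocal() can be used.

    PetscErrorCode         ierr;
    ISLocalToGlobalMapping ltog;

    ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,1,nlocal,globals,PETSC_COPY_VALUES,&ltog);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(A,ltog,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);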
1995
1996
1997/*@
1998 MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by MatSetLocalToGlobalMapping()
1999
2000 Not Collective
2001
2002 Input Parameters:
2003. A - the matrix
2004
2005 Output Parameters:
2006+ rmapping - row mapping
2007- cmapping - column mapping
2008
2009 Level: advanced
2010
2011
2012.seealso: MatSetValuesLocal()
2013@*/
2014PetscErrorCode MatGetLocalToGlobalMapping(Mat A,ISLocalToGlobalMapping *rmapping,ISLocalToGlobalMapping *cmapping)
2015{
2016 PetscFunctionBegin;
2017 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
2018 PetscValidType(A,1);
2019 if (rmapping) PetscValidPointer(rmapping,2);
2020 if (cmapping) PetscValidPointer(cmapping,3);
2021 if (rmapping) *rmapping = A->rmap->mapping;
2022 if (cmapping) *cmapping = A->cmap->mapping;
2023 PetscFunctionReturn(0);
2024}
2025
2026/*@
2027 MatGetLayouts - Gets the PetscLayout objects for rows and columns
2028
2029 Not Collective
2030
2031 Input Parameters:
2032. A - the matrix
2033
2034 Output Parameters:
2035+ rmap - row layout
2036- cmap - column layout
2037
2038 Level: advanced
2039
2040.seealso: MatCreateVecs(), MatGetLocalToGlobalMapping()
2041@*/
2042PetscErrorCode MatGetLayouts(Mat A,PetscLayout *rmap,PetscLayout *cmap)
2043{
2044 PetscFunctionBegin;
2045 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
2046 PetscValidType(A,1);
2047 if (rmap) PetscValidPointer(rmap,2);
2048 if (cmap) PetscValidPointer(cmap,3);
2049 if (rmap) *rmap = A->rmap;
2050 if (cmap) *cmap = A->cmap;
2051 PetscFunctionReturn(0);
2052}
2053
2054/*@C
2055 MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2056 using a local ordering of the nodes.
2057
2058 Not Collective
2059
2060 Input Parameters:
2061+ mat - the matrix
2062. nrow, irow - number of rows and their local indices
2063. ncol, icol - number of columns and their local indices
2064. y - a logically two-dimensional array of values
2065- addv - either INSERT_VALUES or ADD_VALUES, where
2066 ADD_VALUES adds values to any existing entries, and
2067 INSERT_VALUES replaces existing entries with new values
2068
2069 Notes:
2070 If you create the matrix yourself (that is, not with a call to DMCreateMatrix()) then you MUST call MatXXXXSetPreallocation() or
2071 MatSetUp() before using this routine.
2072
2073 If you create the matrix yourself (that is, not with a call to DMCreateMatrix()) then you MUST call MatSetLocalToGlobalMapping() before using this routine.
2074
2075 Calls to MatSetValuesLocal() with the INSERT_VALUES and ADD_VALUES
2076 options cannot be mixed without intervening calls to the assembly
2077 routines.
2078
2079 These values may be cached, so MatAssemblyBegin() and MatAssemblyEnd()
2080 MUST be called after all calls to MatSetValuesLocal() have been completed.
2081
2082 Level: intermediate
2083
2084 Developer Notes:
2085 This is labeled with C so does not automatically generate Fortran stubs and interfaces
2086 because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2087
2088.seealso: MatAssemblyBegin(), MatAssemblyEnd(), MatSetValues(), MatSetLocalToGlobalMapping(),
2089 MatSetValueLocal()
2090@*/
2091PetscErrorCode MatSetValuesLocal(Mat mat,PetscInt nrow,const PetscInt irow[],PetscInt ncol,const PetscInt icol[],const PetscScalar y[],InsertMode addv)
2092{
2093 PetscErrorCode ierr;
2094
2095  PetscFunctionBeginHot;
2096  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2097  PetscValidType(mat,1);
2098  MatCheckPreallocated(mat,1);
2099  if (!nrow || !ncol) PetscFunctionReturn(0); /* no values to insert */
2100  PetscValidIntPointer(irow,3);
2101  PetscValidIntPointer(icol,5);
2102  if (mat->insertmode == NOT_SET_VALUES) {
2103    mat->insertmode = addv;
2104  }
2105#if defined(PETSC_USE_DEBUG)
2106  else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
2107  if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2108  if (!mat->ops->setvalueslocal && !mat->ops->setvalues) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
2109#endif
2110
2111  if (mat->assembled) {
2112    mat->was_assembled = PETSC_TRUE;
2113    mat->assembled = PETSC_FALSE;
2114  }
2115  ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
2116  if (mat->ops->setvalueslocal) {
2117    ierr = (*mat->ops->setvalueslocal)(mat,nrow,irow,ncol,icol,y,addv);CHKERRQ(ierr);
2118  } else {
2119    PetscInt buf[8192],*bufr=0,*bufc=0,*irowm,*icolm;
2120    if ((nrow+ncol) <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
2121      irowm = buf; icolm = buf+nrow;
2122    } else {
2123      ierr = PetscMalloc2(nrow,&bufr,ncol,&bufc);CHKERRQ(ierr);
2124      irowm = bufr; icolm = bufc;
2125    }
2126    ierr = ISLocalToGlobalMappingApply(mat->rmap->mapping,nrow,irow,irowm);CHKERRQ(ierr);
2127    ierr = ISLocalToGlobalMappingApply(mat->cmap->mapping,ncol,icol,icolm);CHKERRQ(ierr);
2128    ierr = MatSetValues(mat,nrow,irowm,ncol,icolm,y,addv);CHKERRQ(ierr);
2129    ierr = PetscFree2(bufr,bufc);CHKERRQ(ierr);
2130  }
2131  ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
2132  PetscFunctionReturn(0);
2133}
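
A hedged sketch of the full calling sequence around MatSetValuesLocal(); the mapping data (nlocal, ghosted[]) and the single-entry insertion are illustrative placeholders, not names from this file:

.vb
  ISLocalToGlobalMapping ltog;
  PetscInt               row = 0,col = 0; /* local indices */
  PetscScalar            v   = 1.0;

  /* ghosted[] maps each of the nlocal local indices to its global index (illustrative data) */
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,1,nlocal,ghosted,PETSC_COPY_VALUES,&ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(mat,ltog,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);

  ierr = MatSetValuesLocal(mat,1,&row,1,&col,&v,ADD_VALUES);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
.ve

Note how the fallback branch above (source lines 2119-2129) translates the local indices through a fixed 8192-entry stack buffer whenever nrow+ncol fits, so small insertions avoid a heap allocation entirely.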
2134
2135/*@C
2136 MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2137 using a local ordering of the nodes a block at a time.
2138
2139 Not Collective
2140
2141 Input Parameters:
2142+ mat - the matrix
2143. nrow, irow - number of rows and their local indices
2144. ncol, icol - number of columns and their local indices
2145. y - a logically two-dimensional array of values
2146- addv - either INSERT_VALUES or ADD_VALUES, where
2147 ADD_VALUES adds values to any existing entries, and
2148 INSERT_VALUES replaces existing entries with new values
2149
2150 Notes:
2151 If you create the matrix yourself (that is, not with a call to DMCreateMatrix()) then you MUST call MatXXXXSetPreallocation() or
2152 MatSetUp() before using this routine.
2153
2154 If you create the matrix yourself (that is, not with a call to DMCreateMatrix()) then you MUST call MatSetBlockSize() and MatSetLocalToGlobalMapping()
2155 before using this routine.
2156
2157 Calls to MatSetValuesBlockedLocal() with the INSERT_VALUES and ADD_VALUES
2158 options cannot be mixed without intervening calls to the assembly
2159 routines.
2160
2161 These values may be cached, so MatAssemblyBegin() and MatAssemblyEnd()
2162 MUST be called after all calls to MatSetValuesBlockedLocal() have been completed.
2163
2164 Level: intermediate
2165
2166 Developer Notes:
2167 This is labeled with C so does not automatically generate Fortran stubs and interfaces
2168 because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2169
2170.seealso: MatSetBlockSize(), MatSetLocalToGlobalMapping(), MatAssemblyBegin(), MatAssemblyEnd(),
2171 MatSetValuesLocal(), MatSetValuesBlocked()
2172@*/
2173PetscErrorCode MatSetValuesBlockedLocal(Mat mat,PetscInt nrow,const PetscInt irow[],PetscInt ncol,const PetscInt icol[],const PetscScalar y[],InsertMode addv)
2174{
2175 PetscErrorCode ierr;
2176
2177  PetscFunctionBeginHot;
2178  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2179  PetscValidType(mat,1);
2180  MatCheckPreallocated(mat,1);
2181  if (!nrow || !ncol) PetscFunctionReturn(0); /* no values to insert */
2182  PetscValidIntPointer(irow,3);
2183  PetscValidIntPointer(icol,5);
2184  PetscValidScalarPointer(y,6);
2185  if (mat->insertmode == NOT_SET_VALUES) {
2186    mat->insertmode = addv;
2187  }
2188#if defined(PETSC_USE_DEBUG)
2189  else if (mat->insertmode != addv) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
2190  if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2191  if (!mat->ops->setvaluesblockedlocal && !mat->ops->setvaluesblocked && !mat->ops->setvalueslocal && !mat->ops->setvalues) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
2192#endif
2193
2194  if (mat->assembled) {
2195    mat->was_assembled = PETSC_TRUE;
2196    mat->assembled = PETSC_FALSE;
2197  }
2198#if defined(PETSC_USE_DEBUG)
2199  /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2200  if (mat->rmap->mapping) {
2201    PetscInt irbs, rbs;
2202    ierr = MatGetBlockSizes(mat, &rbs, NULL);CHKERRQ(ierr);
2203    ierr = ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping,&irbs);CHKERRQ(ierr);
2204    if (rbs != irbs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Different row block sizes! mat %D, row l2g map %D",rbs,irbs);
2205  }
2206  if (mat->cmap->mapping) {
2207    PetscInt icbs, cbs;
2208    ierr = MatGetBlockSizes(mat,NULL,&cbs);CHKERRQ(ierr);
2209    ierr = ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping,&icbs);CHKERRQ(ierr);
2210    if (cbs != icbs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Different col block sizes! mat %D, col l2g map %D",cbs,icbs);
2211  }
2212#endif
2213  ierr = PetscLogEventBegin(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
2214  if (mat->ops->setvaluesblockedlocal) {
2215    ierr = (*mat->ops->setvaluesblockedlocal)(mat,nrow,irow,ncol,icol,y,addv);CHKERRQ(ierr);
2216  } else {
2217    PetscInt buf[8192],*bufr=0,*bufc=0,*irowm,*icolm;
2218    if ((nrow+ncol) <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
2219      irowm = buf; icolm = buf + nrow;
2220    } else {
2221      ierr = PetscMalloc2(nrow,&bufr,ncol,&bufc);CHKERRQ(ierr);
2222      irowm = bufr; icolm = bufc;
2223    }
2224    ierr = ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping,nrow,irow,irowm);CHKERRQ(ierr);
2225    ierr = ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping,ncol,icol,icolm);CHKERRQ(ierr);
2226    ierr = MatSetValuesBlocked(mat,nrow,irowm,ncol,icolm,y,addv);CHKERRQ(ierr);
2227    ierr = PetscFree2(bufr,bufc);CHKERRQ(ierr);
2228  }
2229  ierr = PetscLogEventEnd(MAT_SetValues,mat,0,0,0);CHKERRQ(ierr);
2230  PetscFunctionReturn(0);
2231}
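
A sketch of the blocked variant, assuming block size 2 was set with MatSetBlockSize() before preallocation and a matching block-indexed local-to-global mapping is already attached (all names illustrative):

.vb
  PetscInt    row = 0,col = 0;          /* local BLOCK indices */
  PetscScalar v[] = {1.0,2.0,3.0,4.0};  /* one 2x2 block */

  ierr = MatSetValuesBlockedLocal(mat,1,&row,1,&col,v,INSERT_VALUES);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
.ve

The debug-only checks above (source lines 2200-2210) reject any mismatch between the matrix block sizes and those of the attached mappings, which is why the manual page insists that both MatSetBlockSize() and MatSetLocalToGlobalMapping() come first.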
2232
2233/*@
2234   MatMultDiagonalBlock - Computes the matrix-vector product, y = Dx, where D is defined by the inode or block structure of the diagonal.
2235
2236 Collective on Mat
2237
2238 Input Parameters:
2239+ mat - the matrix
2240- x - the vector to be multiplied
2241
2242 Output Parameters:
2243. y - the result
2244
2245 Notes:
2246   The vectors x and y cannot be the same. I.e., one cannot
2247   call MatMultDiagonalBlock(A,y,y).
2248
2249 Level: developer
2250
2251.seealso: MatMultTranspose(), MatMultAdd(), MatMultTransposeAdd()
2252@*/
2253PetscErrorCode MatMultDiagonalBlock(Mat mat,Vec x,Vec y)
2254{
2255 PetscErrorCode ierr;
2256
2257  PetscFunctionBegin;
2258  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2259  PetscValidType(mat,1);
2260  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
2261  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
2262
2263  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
2264  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2265  if (x == y) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors");
2266  MatCheckPreallocated(mat,1);
2267
2268  if (!mat->ops->multdiagonalblock) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"This matrix type does not have a multiply defined");
2269  ierr = (*mat->ops->multdiagonalblock)(mat,x,y);CHKERRQ(ierr);
2270  ierr = PetscObjectStateIncrease((PetscObject)y);CHKERRQ(ierr);
2271  PetscFunctionReturn(0);
2272}
2273
2274/* --------------------------------------------------------*/
2275/*@
2276 MatMult - Computes the matrix-vector product, y = Ax.
2277
2278 Neighbor-wise Collective on Mat
2279
2280 Input Parameters:
2281+ mat - the matrix
2282- x - the vector to be multiplied
2283
2284 Output Parameters:
2285. y - the result
2286
2287 Notes:
2288 The vectors x and y cannot be the same. I.e., one cannot
2289 call MatMult(A,y,y).
2290
2291 Level: beginner
2292
2293.seealso: MatMultTranspose(), MatMultAdd(), MatMultTransposeAdd()
2294@*/
2295PetscErrorCode MatMult(Mat mat,Vec x,Vec y)
2296{
2297 PetscErrorCode ierr;
2298
2299  PetscFunctionBegin;
2300  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2301  PetscValidType(mat,1);
2302  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
2303  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
2304  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
2305  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2306  if (x == y) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors");
2307#if !defined(PETSC_HAVE_CONSTRAINTS)
2308  if (mat->cmap->N != x->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map->N);
2309  if (mat->rmap->N != y->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %D %D",mat->rmap->N,y->map->N);
2310  if (mat->rmap->n != y->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: local dim %D %D",mat->rmap->n,y->map->n);
2311#endif
2312  ierr = VecSetErrorIfLocked(y,3);CHKERRQ(ierr);
2313  if (mat->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);}
2314  MatCheckPreallocated(mat,1);
2315
2316  ierr = VecLockReadPush(x);CHKERRQ(ierr);
2317  if (!mat->ops->mult) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"This matrix type does not have a multiply defined");
2318  ierr = PetscLogEventBegin(MAT_Mult,mat,x,y,0);CHKERRQ(ierr);
2319  ierr = (*mat->ops->mult)(mat,x,y);CHKERRQ(ierr);
2320  ierr = PetscLogEventEnd(MAT_Mult,mat,x,y,0);CHKERRQ(ierr);
2321  if (mat->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);}
2322  ierr = VecLockReadPop(x);CHKERRQ(ierr);
2323  PetscFunctionReturn(0);
2324}
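
A minimal sketch of a compatible call (A is assumed created and assembled; MatCreateVecs() produces vectors whose layouts satisfy the dimension checks at source lines 2308-2310):

.vb
  Vec x,y;

  ierr = MatCreateVecs(A,&x,&y);CHKERRQ(ierr); /* x matches A's columns, y matches A's rows */
  ierr = VecSet(x,1.0);CHKERRQ(ierr);
  ierr = MatMult(A,x,y);CHKERRQ(ierr);         /* y = A*x; x and y must be distinct */
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
.ve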
2325
2326/*@
2327 MatMultTranspose - Computes matrix transpose times a vector y = A^T * x.
2328
2329 Neighbor-wise Collective on Mat
2330
2331 Input Parameters:
2332+ mat - the matrix
2333- x - the vector to be multiplied
2334
2335 Output Parameters:
2336. y - the result
2337
2338 Notes:
2339 The vectors x and y cannot be the same. I.e., one cannot
2340 call MatMultTranspose(A,y,y).
2341
2342   For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiply;
2343   use MatMultHermitianTranspose()
2344
2345 Level: beginner
2346
2347.seealso: MatMult(), MatMultAdd(), MatMultTransposeAdd(), MatMultHermitianTranspose(), MatTranspose()
2348@*/
2349PetscErrorCode MatMultTranspose(Mat mat,Vec x,Vec y)
2350{
2351 PetscErrorCode ierr;
2352
2353  PetscFunctionBegin;
2354  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2355  PetscValidType(mat,1);
2356  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
2357  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
2358
2359  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
2360  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2361  if (x == y) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors");
2362#if !defined(PETSC_HAVE_CONSTRAINTS)
2363  if (mat->rmap->N != x->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->rmap->N,x->map->N);
2364  if (mat->cmap->N != y->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %D %D",mat->cmap->N,y->map->N);
2365#endif
2366  if (mat->erroriffailure) {ierr = VecValidValues(x,2,PETSC_TRUE);CHKERRQ(ierr);}
2367  MatCheckPreallocated(mat,1);
2368
2369  if (!mat->ops->multtranspose) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"This matrix type does not have a multiply transpose defined");
2370  ierr = PetscLogEventBegin(MAT_MultTranspose,mat,x,y,0);CHKERRQ(ierr);
2371  ierr = VecLockReadPush(x);CHKERRQ(ierr);
2372  ierr = (*mat->ops->multtranspose)(mat,x,y);CHKERRQ(ierr);
2373  ierr = VecLockReadPop(x);CHKERRQ(ierr);
2374  ierr = PetscLogEventEnd(MAT_MultTranspose,mat,x,y,0);CHKERRQ(ierr);
2375  ierr = PetscObjectStateIncrease((PetscObject)y);CHKERRQ(ierr);
2376  if (mat->erroriffailure) {ierr = VecValidValues(y,3,PETSC_FALSE);CHKERRQ(ierr);}
2377  PetscFunctionReturn(0);
2378}
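
For the transpose product the roles of the two layouts swap, which is what the checks at source lines 2363-2364 enforce; a sketch under the same assumptions as the MatMult() example:

.vb
  Vec x,y;

  ierr = MatCreateVecs(A,&y,&x);CHKERRQ(ierr);  /* here x matches A's rows, y matches A's columns */
  ierr = VecSet(x,1.0);CHKERRQ(ierr);
  ierr = MatMultTranspose(A,x,y);CHKERRQ(ierr); /* y = A^T * x */
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
.ve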
2379
2380/*@
2381 MatMultHermitianTranspose - Computes matrix Hermitian transpose times a vector.
2382
2383 Neighbor-wise Collective on Mat
2384
2385 Input Parameters:
2386+ mat - the matrix
2387- x - the vector to be multiplied
2388
2389 Output Parameters:
2390. y - the result
2391
2392 Notes:
2393 The vectors x and y cannot be the same. I.e., one cannot
2394 call MatMultHermitianTranspose(A,y,y).
2395
2396 Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2397
2398 For real numbers MatMultTranspose() and MatMultHermitianTranspose() are identical.
2399
2400 Level: beginner
2401
2402.seealso: MatMult(), MatMultAdd(), MatMultHermitianTransposeAdd(), MatMultTranspose()
2403@*/
2404PetscErrorCode MatMultHermitianTranspose(Mat mat,Vec x,Vec y)
2405{
2406 PetscErrorCode ierr;
2407 Vec w;
2408
2409 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 2409; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
2410 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2410,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2410,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2410,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2410,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2411 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2411,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2412 PetscValidHeaderSpecific(x,VEC_CLASSID,2)do { if (!x) return PetscError(((MPI_Comm)0x44000001),2412,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),2412,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),2412,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),2412,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
2413 PetscValidHeaderSpecific(y,VEC_CLASSID,3)do { if (!y) return PetscError(((MPI_Comm)0x44000001),2413,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(y,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),2413,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(y))->classid != VEC_CLASSID) { if (
((PetscObject)(y))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),2413,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),2413,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
2414
2415 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),2415,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
2416 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),2416,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
2417 if (x == y) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),2417,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"x and y must be different vectors")
;
2418#if !defined(PETSC_HAVE_CONSTRAINTS)
2419 if (mat->rmap->N != x->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->rmap->N,x->map->N)return PetscError(((MPI_Comm)0x44000001),2419,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec x: global dim %D %D",mat
->rmap->N,x->map->N)
;
2420 if (mat->cmap->N != y->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %D %D",mat->cmap->N,y->map->N)return PetscError(((MPI_Comm)0x44000001),2420,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec y: global dim %D %D",mat
->cmap->N,y->map->N)
;
2421#endif
2422 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),2422,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
2423
2424 ierr = PetscLogEventBegin(MAT_MultHermitianTranspose,mat,x,y,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MultHermitianTranspose].active) ? (*PetscLogPLB)((MAT_MultHermitianTranspose
),0,(PetscObject)(mat),(PetscObject)(x),(PetscObject)(y),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2424,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2425 if (mat->ops->multhermitiantranspose) {
2426 ierr = VecLockReadPush(x);CHKERRQ(ierr);
2427 ierr = (*mat->ops->multhermitiantranspose)(mat,x,y);CHKERRQ(ierr);
2428 ierr = VecLockReadPop(x);CHKERRQ(ierr);
2429 } else {
2430 ierr = VecDuplicate(x,&w);CHKERRQ(ierr);
2431 ierr = VecCopy(x,w);CHKERRQ(ierr);
2432 ierr = VecConjugate(w);CHKERRQ(ierr);
2433 ierr = MatMultTranspose(mat,w,y);CHKERRQ(ierr);
2434 ierr = VecDestroy(&w);CHKERRQ(ierr);
2435 ierr = VecConjugate(y);CHKERRQ(ierr);
2436 }
2437 ierr = PetscLogEventEnd(MAT_MultHermitianTranspose,mat,x,y,0);CHKERRQ(ierr);
2438 ierr = PetscObjectStateIncrease((PetscObject)y);CHKERRQ(ierr);
2439 PetscFunctionReturn(0);
2440}
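/* Editor's note: when a matrix type provides no multhermitiantranspose op, the
   fallback above uses the identity y = A^H x = conj(A^T conj(x)). A minimal usage
   sketch, assuming A is an assembled (complex) matrix; A, x, y are hypothetical:
.vb
     Vec x,y;
     MatCreateVecs(A,&y,&x);             // x matches the rows of A, y its columns
     MatMultHermitianTranspose(A,x,y);   // y = A^H x
.ve
*/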
2441
2442/*@
2443 MatMultAdd - Computes v3 = v2 + A * v1.
2444
2445 Neighbor-wise Collective on Mat
2446
2447 Input Parameters:
2448+ mat - the matrix
2449- v1, v2 - the vectors
2450
2451 Output Parameters:
2452. v3 - the result
2453
2454 Notes:
2455 The vectors v1 and v3 cannot be the same. I.e., one cannot
2456 call MatMultAdd(A,v1,v2,v1).
2457
2458 Level: beginner
2459
2460.seealso: MatMultTranspose(), MatMult(), MatMultTransposeAdd()
2461@*/
2462PetscErrorCode MatMultAdd(Mat mat,Vec v1,Vec v2,Vec v3)
2463{
2464 PetscErrorCode ierr;
2465
2466 PetscFunctionBegin;
2467 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2468 PetscValidType(mat,1);
2469 PetscValidHeaderSpecific(v1,VEC_CLASSID,2);
2470 PetscValidHeaderSpecific(v2,VEC_CLASSID,3);
2471 PetscValidHeaderSpecific(v3,VEC_CLASSID,4);
2472
2473 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
2474 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2475 if (mat->cmap->N != v1->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v1: global dim %D %D",mat->cmap->N,v1->map->N);
2476 /* if (mat->rmap->N != v2->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %D %D",mat->rmap->N,v2->map->N);
2477 if (mat->rmap->N != v3->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %D %D",mat->rmap->N,v3->map->N); */
2478 if (mat->rmap->n != v3->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: local dim %D %D",mat->rmap->n,v3->map->n);
2479 if (mat->rmap->n != v2->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: local dim %D %D",mat->rmap->n,v2->map->n);
2480 if (v1 == v3) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"v1 and v3 must be different vectors");
2481 MatCheckPreallocated(mat,1);
2482
2483 if (!mat->ops->multadd) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No MatMultAdd() for matrix type '%s'",((PetscObject)mat)->type_name);
2484 ierr = PetscLogEventBegin(MAT_MultAdd,mat,v1,v2,v3);CHKERRQ(ierr);
2485 ierr = VecLockReadPush(v1);CHKERRQ(ierr);
2486 ierr = (*mat->ops->multadd)(mat,v1,v2,v3);CHKERRQ(ierr);
2487 ierr = VecLockReadPop(v1);CHKERRQ(ierr);
2488 ierr = PetscLogEventEnd(MAT_MultAdd,mat,v1,v2,v3);CHKERRQ(ierr);
2489 ierr = PetscObjectStateIncrease((PetscObject)v3);CHKERRQ(ierr);
2490 PetscFunctionReturn(0);
2491}
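/* Editor's note: a minimal usage sketch for MatMultAdd(), assuming A is an
   assembled matrix; A, x, b, r are hypothetical names:
.vb
     Vec x,b,r;
     MatCreateVecs(A,&x,&b);             // x matches the columns of A, b its rows
     VecDuplicate(b,&r);
     MatMultAdd(A,x,b,r);                // r = b + A*x
.ve
   Per the checks above only v1 and v3 must differ, so v2 and v3 may alias. */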
2492
2493/*@
2494 MatMultTransposeAdd - Computes v3 = v2 + A' * v1.
2495
2496 Neighbor-wise Collective on Mat
2497
2498 Input Parameters:
2499+ mat - the matrix
2500- v1, v2 - the vectors
2501
2502 Output Parameters:
2503. v3 - the result
2504
2505 Notes:
2506 The vectors v1 and v3 cannot be the same. I.e., one cannot
2507 call MatMultTransposeAdd(A,v1,v2,v1).
2508
2509 Level: beginner
2510
2511.seealso: MatMultTranspose(), MatMultAdd(), MatMult()
2512@*/
2513PetscErrorCode MatMultTransposeAdd(Mat mat,Vec v1,Vec v2,Vec v3)
2514{
2515 PetscErrorCode ierr;
2516
2517 PetscFunctionBegin;
2518 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2519 PetscValidType(mat,1);
2520 PetscValidHeaderSpecific(v1,VEC_CLASSID,2);
2521 PetscValidHeaderSpecific(v2,VEC_CLASSID,3);
2522 PetscValidHeaderSpecific(v3,VEC_CLASSID,4);
2523
2524 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
2525 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2526 if (!mat->ops->multtransposeadd) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
2527 if (v1 == v3) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"v1 and v3 must be different vectors");
2528 if (mat->rmap->N != v1->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v1: global dim %D %D",mat->rmap->N,v1->map->N);
2529 if (mat->cmap->N != v2->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %D %D",mat->cmap->N,v2->map->N);
2530 if (mat->cmap->N != v3->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %D %D",mat->cmap->N,v3->map->N);
2531 MatCheckPreallocated(mat,1);
2532
2533 ierr = PetscLogEventBegin(MAT_MultTransposeAdd,mat,v1,v2,v3);CHKERRQ(ierr);
2534 ierr = VecLockReadPush(v1);CHKERRQ(ierr);
2535 ierr = (*mat->ops->multtransposeadd)(mat,v1,v2,v3);CHKERRQ(ierr);
2536 ierr = VecLockReadPop(v1);CHKERRQ(ierr);
2537 ierr = PetscLogEventEnd(MAT_MultTransposeAdd,mat,v1,v2,v3);CHKERRQ(ierr);
2538 ierr = PetscObjectStateIncrease((PetscObject)v3);CHKERRQ(ierr);
2539 PetscFunctionReturn(0);
2540}
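/* Editor's note: a minimal usage sketch for MatMultTransposeAdd(), assuming A is
   assembled; g (matching the columns of A) and r (matching its rows) are
   hypothetical names:
.vb
     MatMultTransposeAdd(A,r,g,g);       // g = g + A'*r (v2 == v3 is permitted)
.ve
*/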
2541
2542/*@
2543 MatMultHermitianTransposeAdd - Computes v3 = v2 + A^H * v1.
2544
2545 Neighbor-wise Collective on Mat
2546
2547 Input Parameters:
2548+ mat - the matrix
2549- v1, v2 - the vectors
2550
2551 Output Parameters:
2552. v3 - the result
2553
2554 Notes:
2555 The vectors v1 and v3 cannot be the same. I.e., one cannot
2556 call MatMultHermitianTransposeAdd(A,v1,v2,v1).
2557
2558 Level: beginner
2559
2560.seealso: MatMultHermitianTranspose(), MatMultTranspose(), MatMultAdd(), MatMult()
2561@*/
2562PetscErrorCode MatMultHermitianTransposeAdd(Mat mat,Vec v1,Vec v2,Vec v3)
2563{
2564 PetscErrorCode ierr;
2565
2566 PetscFunctionBegin;
2567 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2568 PetscValidType(mat,1);
2569 PetscValidHeaderSpecific(v1,VEC_CLASSID,2);
2570 PetscValidHeaderSpecific(v2,VEC_CLASSID,3);
2571 PetscValidHeaderSpecific(v3,VEC_CLASSID,4);
2572
2573 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
2574 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2575 if (v1 == v3) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"v1 and v3 must be different vectors");
2576 if (mat->rmap->N != v1->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v1: global dim %D %D",mat->rmap->N,v1->map->N);
2577 if (mat->cmap->N != v2->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %D %D",mat->cmap->N,v2->map->N);
2578 if (mat->cmap->N != v3->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %D %D",mat->cmap->N,v3->map->N);
2579 MatCheckPreallocated(mat,1);
2580
2581 ierr = PetscLogEventBegin(MAT_MultHermitianTransposeAdd,mat,v1,v2,v3);CHKERRQ(ierr);
2582 ierr = VecLockReadPush(v1);CHKERRQ(ierr);
2583 if (mat->ops->multhermitiantransposeadd) {
2584 ierr = (*mat->ops->multhermitiantransposeadd)(mat,v1,v2,v3);CHKERRQ(ierr);
2585 } else {
2586 Vec w,z;
2587 ierr = VecDuplicate(v1,&w);CHKERRQ(ierr);
2588 ierr = VecCopy(v1,w);CHKERRQ(ierr);
2589 ierr = VecConjugate(w);CHKERRQ(ierr);
2590 ierr = VecDuplicate(v3,&z);CHKERRQ(ierr);
2591 ierr = MatMultTranspose(mat,w,z);CHKERRQ(ierr);
2592 ierr = VecDestroy(&w);CHKERRQ(ierr);
2593 ierr = VecConjugate(z);CHKERRQ(ierr);
2594 if (v2 != v3) {
2595 ierr = VecWAXPY(v3,1.0,v2,z);CHKERRQ(ierr);
2596 } else {
2597 ierr = VecAXPY(v3,1.0,z);CHKERRQ(ierr);
2598 }
2599 ierr = VecDestroy(&z);CHKERRQ(ierr);
2600 }
2601 ierr = VecLockReadPop(v1);CHKERRQ(ierr);
2602 ierr = PetscLogEventEnd(MAT_MultHermitianTransposeAdd,mat,v1,v2,v3);CHKERRQ(ierr);
2603 ierr = PetscObjectStateIncrease((PetscObject)v3);CHKERRQ(ierr);
2604 PetscFunctionReturn(0);
2605}
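/* Editor's note: the fallback above computes z = conj(A^T conj(v1)) = A^H v1 and
   then adds v2: VecWAXPY() writes v3 = v2 + z when v3 is a distinct vector, while
   VecAXPY() handles the aliased case v2 == v3 in place as v3 += z. */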
2606
2607/*@
2608 MatMultConstrained - The inner multiplication routine for a
2609 constrained matrix P^T A P.
2610
2611 Neighbor-wise Collective on Mat
2612
2613 Input Parameters:
2614+ mat - the matrix
2615- x - the vector to be multiplied
2616
2617 Output Parameters:
2618. y - the result
2619
2620 Notes:
2621 The vectors x and y cannot be the same, i.e., one cannot
2622 call MatMultConstrained(A,y,y).
2623
2624 Level: beginner
2625
2626.seealso: MatMult(), MatMultTranspose(), MatMultAdd(), MatMultTransposeAdd()
2627@*/
2628PetscErrorCode MatMultConstrained(Mat mat,Vec x,Vec y)
2629{
2630 PetscErrorCode ierr;
2631
2632 PetscFunctionBegin;
2633 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2634 PetscValidHeaderSpecific(x,VEC_CLASSID,2);
2635 PetscValidHeaderSpecific(y,VEC_CLASSID,3);
2636 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
2637 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2638 if (x == y) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors");
2639 if (mat->cmap->N != x->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map->N);
2640 if (mat->rmap->N != y->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %D %D",mat->rmap->N,y->map->N);
2641 if (mat->rmap->n != y->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: local dim %D %D",mat->rmap->n,y->map->n);
2642
2643 ierr = PetscLogEventBegin(MAT_MultConstrained,mat,x,y,0);CHKERRQ(ierr);
2644 ierr = VecLockReadPush(x);CHKERRQ(ierr);
2645 ierr = (*mat->ops->multconstrained)(mat,x,y);CHKERRQ(ierr);
2646 ierr = VecLockReadPop(x);CHKERRQ(ierr);
2647 ierr = PetscLogEventEnd(MAT_MultConstrained,mat,x,y,0);CHKERRQ(ierr);
2648 ierr = PetscObjectStateIncrease((PetscObject)y);CHKERRQ(ierr);
2649 PetscFunctionReturn(0);
2650}
2651
2652/*@
2653 MatMultTransposeConstrained - The inner multiplication routine for a
2654 constrained matrix P^T A^T P.
2655
2656 Neighbor-wise Collective on Mat
2657
2658 Input Parameters:
2659+ mat - the matrix
2660- x - the vector to be multiplied
2661
2662 Output Parameters:
2663. y - the result
2664
2665 Notes:
2666 The vectors x and y cannot be the same, i.e., one cannot
2667 call MatMultTransposeConstrained(A,y,y).
2668
2669 Level: beginner
2670
2671.seealso: MatMult(), MatMultTranspose(), MatMultAdd(), MatMultTransposeAdd()
2672@*/
2673PetscErrorCode MatMultTransposeConstrained(Mat mat,Vec x,Vec y)
2674{
2675 PetscErrorCode ierr;
2676
2677 PetscFunctionBegin;
2678 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2679 PetscValidHeaderSpecific(x,VEC_CLASSID,2);
2680 PetscValidHeaderSpecific(y,VEC_CLASSID,3);
2681 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
2682 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2683 if (x == y) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors");
2684 if (mat->rmap->N != x->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->rmap->N,x->map->N);
2685 if (mat->cmap->N != y->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %D %D",mat->cmap->N,y->map->N);
2686
2687 ierr = PetscLogEventBegin(MAT_MultConstrained,mat,x,y,0);CHKERRQ(ierr);
2688 ierr = (*mat->ops->multtransposeconstrained)(mat,x,y);CHKERRQ(ierr);
2689 ierr = PetscLogEventEnd(MAT_MultConstrained,mat,x,y,0);CHKERRQ(ierr);
2690 ierr = PetscObjectStateIncrease((PetscObject)y);CHKERRQ(ierr);
2691 PetscFunctionReturn(0);
2692}
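/* Editor's note: a minimal usage sketch for the constrained products, assuming the
   matrix type implements the constrained ops; A, x, y, xt, yt are hypothetical
   vectors with the layouts checked above:
.vb
     MatMultConstrained(A,x,y);             // y  = P^T A P x
     MatMultTransposeConstrained(A,xt,yt);  // yt = P^T A^T P xt
.ve
*/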
2693
2694/*@C
2695 MatGetFactorType - gets the type of factorization of the matrix
2696
2697 Not Collective
2698
2699 Input Parameters:
2700. mat - the matrix
2701
2702 Output Parameters:
2703. t - the type, one of MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC, MAT_FACTOR_ILUDT
2704
2705 Level: intermediate
2706
2707.seealso: MatFactorType, MatGetFactor(), MatSetFactorType()
2708@*/
2709PetscErrorCode MatGetFactorType(Mat mat,MatFactorType *t)
2710{
2711 PetscFunctionBegin;
2712 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2713 PetscValidType(mat,1);
2714 PetscValidPointer(t,2);
2715 *t = mat->factortype;
2716 PetscFunctionReturn(0);
2717}
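/* Editor's note: a minimal usage sketch, assuming F is a hypothetical factored
   matrix (e.g. obtained via MatGetFactor()):
.vb
     MatFactorType t;
     MatGetFactorType(F,&t);
     if (t == MAT_FACTOR_NONE) { }       // F is not a factored matrix
.ve
*/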
2718
2719/*@C
2720 MatSetFactorType - sets the type of factorization of the matrix
2721
2722 Logically Collective on Mat
2723
2724 Input Parameters:
2725+ mat - the matrix
2726- t - the type, one of MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC, MAT_FACTOR_ILUDT
2727
2728 Level: intermediate
2729
2730.seealso: MatFactorType, MatGetFactor(), MatGetFactorType()
2731@*/
2732PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
2733{
2734 PetscFunctionBegin;
2735 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),2735,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),2735,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),2735,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),2735,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
2736 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),2736,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
2737 mat->factortype = t;
2738 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
2739}
2740
2741/* ------------------------------------------------------------*/
2742/*@C
2743 MatGetInfo - Returns information about matrix storage (number of
2744 nonzeros, memory, etc.).
2745
2746 Collective on Mat if MAT_GLOBAL_MAX or MAT_GLOBAL_SUM is used as the flag
2747
2748 Input Parameters:
2749+ mat - the matrix
2750- flag - flag indicating the type of parameters to be returned
2751 (MAT_LOCAL - local matrix, MAT_GLOBAL_MAX - maximum over all processors,
2752 MAT_GLOBAL_SUM - sum over all processors)
2753
2754 Output Parameter:
2755. info - matrix information context
2756
2757 Notes:
2758 The MatInfo context contains a variety of matrix data, including
2759 number of nonzeros allocated and used, number of mallocs during
2760 matrix assembly, etc. Additional information for factored matrices
2761 is provided (such as the fill ratio, number of mallocs during
2762 factorization, etc.). Much of this info is printed to PETSC_STDOUT
2763 when using the runtime options
2764$ -info -mat_view ::ascii_info
2765
2766 Example for C/C++ Users:
2767 See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
2768 data within the MatInfo context. For example,
2769.vb
2770 MatInfo info;
2771 Mat A;
2772 double mal, nz_a, nz_u;
2773
2774 MatGetInfo(A,MAT_LOCAL,&info);
2775 mal = info.mallocs;
2776 nz_a = info.nz_allocated;
2777.ve
2778
2779 Example for Fortran Users:
2780 Fortran users should declare info as a double precision
2781 array of dimension MAT_INFO_SIZE, and then extract the parameters
2782 of interest. See the file ${PETSC_DIR}/include/petsc/finclude/petscmat.h
2783 for a complete list of parameter names.
2784.vb
2785 double precision info(MAT_INFO_SIZE)
2786 double precision mal, nz_a
2787 Mat A
2788 integer ierr
2789
2790 call MatGetInfo(A,MAT_LOCAL,info,ierr)
2791 mal = info(MAT_INFO_MALLOCS)
2792 nz_a = info(MAT_INFO_NZ_ALLOCATED)
2793.ve
2794
2795 Level: intermediate
2796
2797 Developer Note: the Fortran interface is not autogenerated as the F90
2798 interface definition cannot be generated correctly [due to MatInfo]
2799
2800.seealso: MatStashGetInfo()
2801
2802@*/
2803PetscErrorCode MatGetInfo(Mat mat,MatInfoType flag,MatInfo *info)
2804{
2805 PetscErrorCode ierr;
2806
2807 PetscFunctionBegin;
2808 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2809 PetscValidType(mat,1);
2810 PetscValidPointer(info,3);
2811 if (!mat->ops->getinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
2812 MatCheckPreallocated(mat,1);
2813 ierr = (*mat->ops->getinfo)(mat,flag,info);CHKERRQ(ierr);
2814 PetscFunctionReturn(0);
2815}
2816
2817/*
2818 This is used by external packages where it is not easy to get the info from the actual
2819 matrix factorization.
2820*/
2821PetscErrorCode MatGetInfo_External(Mat A,MatInfoType flag,MatInfo *info)
2822{
2823 PetscErrorCode ierr;
2824
2825 PetscFunctionBegin;
2826 ierr = PetscMemzero(info,sizeof(MatInfo));CHKERRQ(ierr);
2827 PetscFunctionReturn(0);
2828}
2829
2830/* ----------------------------------------------------------*/
2831
2832/*@C
2833 MatLUFactor - Performs in-place LU factorization of a matrix.
2834
2835 Collective on Mat
2836
2837 Input Parameters:
2838+ mat - the matrix
2839. row - row permutation
2840. col - column permutation
2841- info - options for factorization, includes
2842$ fill - expected fill as ratio of original fill.
2843$ dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
2844$ Run with the option -info to determine an optimal value to use
2845
2846 Notes:
2847 Most users should employ the simplified KSP interface for linear solvers
2848 instead of working directly with matrix algebra routines such as this.
2849 See, e.g., KSPCreate().
2850
2851 This changes the state of the matrix to a factored matrix; it cannot be used
2852 for example with MatSetValues() unless one first calls MatSetUnfactored().
2853
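 Example of Usage (a sketch; A, b, x are hypothetical, with A assembled and square):
.vb
   IS rowperm, colperm;
   MatGetOrdering(A,MATORDERINGND,&rowperm,&colperm);
   MatLUFactor(A,rowperm,colperm,NULL);   /* NULL uses default MatFactorInfo values */
   MatSolve(A,b,x);                       /* A now holds its own LU factors */
.ve
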
2854 Level: developer
2855
2856.seealso: MatLUFactorSymbolic(), MatLUFactorNumeric(), MatCholeskyFactor(),
2857 MatGetOrdering(), MatSetUnfactored(), MatFactorInfo, MatGetFactor()
2858
2859 Developer Note: the Fortran interface is not autogenerated as the F90
2860 interface definition cannot be generated correctly [due to MatFactorInfo]
2861
2862@*/
2863PetscErrorCode MatLUFactor(Mat mat,IS row,IS col,const MatFactorInfo *info)
2864{
2865 PetscErrorCode ierr;
2866 MatFactorInfo tinfo;
2867
2868 PetscFunctionBegin;
2869 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2870 if (row) PetscValidHeaderSpecific(row,IS_CLASSID,2);
2871 if (col) PetscValidHeaderSpecific(col,IS_CLASSID,3);
2872 if (info) PetscValidPointer(info,4);
2873 PetscValidType(mat,1);
2874 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
2875 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2876 if (!mat->ops->lufactor) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
2877 MatCheckPreallocated(mat,1);
2878 if (!info) {
2879 ierr = MatFactorInfoInitialize(&tinfo);CHKERRQ(ierr);
2880 info = &tinfo;
2881 }
2882
2883 ierr = PetscLogEventBegin(MAT_LUFactor,mat,row,col,0);CHKERRQ(ierr);
2884 ierr = (*mat->ops->lufactor)(mat,row,col,info);CHKERRQ(ierr);
2885 ierr = PetscLogEventEnd(MAT_LUFactor,mat,row,col,0);CHKERRQ(ierr);
2886 ierr = PetscObjectStateIncrease((PetscObject)mat);CHKERRQ(ierr);
2887 PetscFunctionReturn(0);
2888}
2889
2890/*@C
2891 MatILUFactor - Performs in-place ILU factorization of a matrix.
2892
2893 Collective on Mat
2894
2895 Input Parameters:
2896+ mat - the matrix
2897. row - row permutation
2898. col - column permutation
2899- info - structure containing
2900$ levels - number of levels of fill.
2901$ expected fill - as ratio of original fill.
2902$ 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
2903 missing diagonal entries)
2904
2905 Notes:
2906 The factorization is truly in-place only when the level of fill is zero; otherwise it allocates
2907 new space to store the factored matrix and frees the previous storage.
2908
2909 Most users should employ the simplified KSP interface for linear solvers
2910 instead of working directly with matrix algebra routines such as this.
2911 See, e.g., KSPCreate().
2912
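 Example of Usage (a sketch; A is a hypothetical assembled square matrix):
.vb
   IS rowperm, colperm;
   MatFactorInfo info;
   MatGetOrdering(A,MATORDERINGNATURAL,&rowperm,&colperm);
   MatFactorInfoInitialize(&info);
   info.levels = 0;                        /* ILU(0), the truly in-place case */
   MatILUFactor(A,rowperm,colperm,&info);
.ve
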
2913 Level: developer
2914
2915.seealso: MatILUFactorSymbolic(), MatLUFactorNumeric(), MatCholeskyFactor(), MatFactorInfo
2916
2917 Developer Note: the Fortran interface is not autogenerated as the F90
2918 interface definition cannot be generated correctly [due to MatFactorInfo]
2919
2920@*/
2921PetscErrorCode MatILUFactor(Mat mat,IS row,IS col,const MatFactorInfo *info)
2922{
2923 PetscErrorCode ierr;
2924
2925 PetscFunctionBegin;
2926 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2927 if (row) PetscValidHeaderSpecific(row,IS_CLASSID,2);
2928 if (col) PetscValidHeaderSpecific(col,IS_CLASSID,3);
2929 PetscValidPointer(info,4);
2930 PetscValidType(mat,1);
2931 if (mat->rmap->N != mat->cmap->N) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONG,"matrix must be square");
2932 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
2933 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2934 if (!mat->ops->ilufactor) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
2935 MatCheckPreallocated(mat,1);
2936
2937 ierr = PetscLogEventBegin(MAT_ILUFactor,mat,row,col,0);CHKERRQ(ierr);
2938 ierr = (*mat->ops->ilufactor)(mat,row,col,info);CHKERRQ(ierr);
2939 ierr = PetscLogEventEnd(MAT_ILUFactor,mat,row,col,0);CHKERRQ(ierr);
2940 ierr = PetscObjectStateIncrease((PetscObject)mat);CHKERRQ(ierr);
2941 PetscFunctionReturn(0);
2942}
2943
2944/*@C
2945 MatLUFactorSymbolic - Performs symbolic LU factorization of a matrix.
2946 Call this routine before calling MatLUFactorNumeric().
2947
2948 Collective on Mat
2949
2950 Input Parameters:
2951+ fact - the factor matrix obtained with MatGetFactor()
2952. mat - the matrix
2953. row, col - row and column permutations
2954- info - options for factorization, includes
2955$ fill - expected fill as ratio of original fill.
2956$ dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
2957$ Run with the option -info to determine an optimal value to use
2958
2959
2960 Notes:
2961 See Users-Manual: ch_mat for additional information about choosing the fill factor for better efficiency.
2962
2963 Most users should employ the simplified KSP interface for linear solvers
2964 instead of working directly with matrix algebra routines such as this.
2965 See, e.g., KSPCreate().
2966
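 Example of Usage (a sketch of the full symbolic/numeric sequence; A, b, x are hypothetical):
.vb
   Mat F;
   IS  rowperm, colperm;
   MatGetOrdering(A,MATORDERINGND,&rowperm,&colperm);
   MatGetFactor(A,MATSOLVERPETSC,MAT_FACTOR_LU,&F);
   MatLUFactorSymbolic(F,A,rowperm,colperm,NULL);
   MatLUFactorNumeric(F,A,NULL);
   MatSolve(F,b,x);
.ve
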
2967 Level: developer
2968
2969.seealso: MatLUFactor(), MatLUFactorNumeric(), MatCholeskyFactor(), MatFactorInfo, MatFactorInfoInitialize()
2970
2971 Developer Note: the Fortran interface is not autogenerated as the F90
2972 interface definition cannot be generated correctly [due to MatFactorInfo]
2973
2974@*/
2975PetscErrorCode MatLUFactorSymbolic(Mat fact,Mat mat,IS row,IS col,const MatFactorInfo *info)
2976{
2977 PetscErrorCode ierr;
2978 MatFactorInfo tinfo;
2979
2980 PetscFunctionBegin;
2981 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2982 if (row) PetscValidHeaderSpecific(row,IS_CLASSID,2);
2983 if (col) PetscValidHeaderSpecific(col,IS_CLASSID,3);
2984 if (info) PetscValidPointer(info,4);
2985 PetscValidType(mat,1);
2986 PetscValidPointer(fact,5);
2987 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
2988 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2989 if (!(fact)->ops->lufactorsymbolic) {
2990 MatSolverType spackage;
2991 ierr = MatFactorGetSolverType(fact,&spackage);CHKERRQ(ierr);
2992 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s symbolic LU using solver package %s",((PetscObject)mat)->type_name,spackage);
2993 }
2994 MatCheckPreallocated(mat,2);
2995 if (!info) {
2996 ierr = MatFactorInfoInitialize(&tinfo);CHKERRQ(ierr);
2997 info = &tinfo;
2998 }
2999
3000 ierr = PetscLogEventBegin(MAT_LUFactorSymbolic,mat,row,col,0);CHKERRQ(ierr);
3001 ierr = (fact->ops->lufactorsymbolic)(fact,mat,row,col,info);CHKERRQ(ierr);
3002 ierr = PetscLogEventEnd(MAT_LUFactorSymbolic,mat,row,col,0);CHKERRQ(ierr);
3003 ierr = PetscObjectStateIncrease((PetscObject)fact);CHKERRQ(ierr);
3004 PetscFunctionReturn(0);
3005}
3006
3007/*@C
3008 MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3009 Call this routine after first calling MatLUFactorSymbolic().
3010
3011 Collective on Mat
3012
3013 Input Parameters:
3014+ fact - the factor matrix obtained with MatGetFactor()
3015. mat - the matrix
3016- info - options for factorization
3017
3018 Notes:
3019 See MatLUFactor() for in-place factorization. See
3020 MatCholeskyFactorNumeric() for the symmetric, positive definite case.
3021
3022 Most users should employ the simplified KSP interface for linear solvers
3023 instead of working directly with matrix algebra routines such as this.
3024 See, e.g., KSPCreate().
3025
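 Example of Usage (a sketch; F holds a prior symbolic factorization of A and only the numerical values of A have changed):
.vb
   MatLUFactorNumeric(F,A,NULL);   /* refactor numerically, reusing the symbolic data in F */
   MatSolve(F,b,x);
.ve
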
3026 Level: developer
3027
3028.seealso: MatLUFactorSymbolic(), MatLUFactor(), MatCholeskyFactor()
3029
3030 Developer Note: the Fortran interface is not autogenerated as the F90
3031 interface definition cannot be generated correctly [due to MatFactorInfo]
3032
3033@*/
3034PetscErrorCode MatLUFactorNumeric(Mat fact,Mat mat,const MatFactorInfo *info)
3035{
3036 MatFactorInfo tinfo;
3037 PetscErrorCode ierr;
3038
3039 PetscFunctionBegin;
3040 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3041 PetscValidType(mat,1);
3042 PetscValidPointer(fact,2);
3043 PetscValidHeaderSpecific(fact,MAT_CLASSID,2);
3044 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
3045 if (mat->rmap->N != (fact)->rmap->N || mat->cmap->N != (fact)->cmap->N) SETERRQ4(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Mat fact: global dimensions are different %D should = %D %D should = %D",mat->rmap->N,(fact)->rmap->N,mat->cmap->N,(fact)->cmap->N);
3046
3047 if (!(fact)->ops->lufactornumeric) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s numeric LU",((PetscObject)mat)->type_name);
3048 MatCheckPreallocated(mat,2);
3049 if (!info) {
3050 ierr = MatFactorInfoInitialize(&tinfo);CHKERRQ(ierr);
3051 info = &tinfo;
3052 }
3053
3054 ierr = PetscLogEventBegin(MAT_LUFactorNumeric,mat,fact,0,0);CHKERRQ(ierr);
3055 ierr = (fact->ops->lufactornumeric)(fact,mat,info);CHKERRQ(ierr);
3056 ierr = PetscLogEventEnd(MAT_LUFactorNumeric,mat,fact,0,0);CHKERRQ(ierr);
3057 ierr = MatViewFromOptions(fact,NULL,"-mat_factor_view");CHKERRQ(ierr);
3058 ierr = PetscObjectStateIncrease((PetscObject)fact);CHKERRQ(ierr);
3059 PetscFunctionReturn(0);
3060}
3061
3062/*@C
3063 MatCholeskyFactor - Performs in-place Cholesky factorization of a
3064 symmetric matrix.
3065
3066 Collective on Mat
3067
3068 Input Parameters:
3069+ mat - the matrix
3070. perm - row and column permutations
3071- info - options for factorization, includes fill - expected fill as ratio of original fill
3072
3073 Notes:
3074 See MatLUFactor() for the nonsymmetric case. See also
3075 MatCholeskyFactorSymbolic(), and MatCholeskyFactorNumeric().
3076
3077 Most users should employ the simplified KSP interface for linear solvers
3078 instead of working directly with matrix algebra routines such as this.
3079 See, e.g., KSPCreate().
3080
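 Example of Usage (a sketch; A, b, x are hypothetical, with A symmetric and assembled):
.vb
   IS perm, iperm;
   MatGetOrdering(A,MATORDERINGRCM,&perm,&iperm);
   MatCholeskyFactor(A,perm,NULL);   /* NULL uses default MatFactorInfo values */
   MatSolve(A,b,x);
.ve
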
3081 Level: developer
3082
3083.seealso: MatLUFactor(), MatCholeskyFactorSymbolic(), MatCholeskyFactorNumeric()
3084 MatGetOrdering()
3085
3086 Developer Note: the Fortran interface is not autogenerated as the F90
3087 interface definition cannot be generated correctly [due to MatFactorInfo]
3088
3089@*/
3090PetscErrorCode MatCholeskyFactor(Mat mat,IS perm,const MatFactorInfo *info)
3091{
3092 PetscErrorCode ierr;
3093 MatFactorInfo tinfo;
3094
3095 PetscFunctionBegin;
3096 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3097 PetscValidType(mat,1);
3098 if (perm) PetscValidHeaderSpecific(perm,IS_CLASSID,2);
3099 if (info) PetscValidPointer(info,3);
3100 if (mat->rmap->N != mat->cmap->N) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONG,"Matrix must be square");
3101 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
3102 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3103 if (!mat->ops->choleskyfactor) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"In-place factorization for Mat type %s is not supported, try out-of-place factorization. See MatCholeskyFactorSymbolic/Numeric",((PetscObject)mat)->type_name);
3104 MatCheckPreallocated(mat,1);
3105 if (!info) {
3106 ierr = MatFactorInfoInitialize(&tinfo);CHKERRQ(ierr);
3107 info = &tinfo;
3108 }
3109
3110 ierr = PetscLogEventBegin(MAT_CholeskyFactor,mat,perm,0,0);CHKERRQ(ierr);
3111 ierr = (*mat->ops->choleskyfactor)(mat,perm,info);CHKERRQ(ierr);
3112 ierr = PetscLogEventEnd(MAT_CholeskyFactor,mat,perm,0,0);CHKERRQ(ierr);
3113 ierr = PetscObjectStateIncrease((PetscObject)mat);CHKERRQ(ierr);
3114 PetscFunctionReturn(0);
3115}
3116
3117/*@C
3118 MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3119 of a symmetric matrix.
3120
3121 Collective on Mat
3122
3123 Input Parameters:
3124+ fact - the factor matrix obtained with MatGetFactor()
3125. mat - the matrix
3126. perm - row and column permutations
3127- info - options for factorization, includes
3128$ fill - expected fill as ratio of original fill.
3129$ dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3130$ Run with the option -info to determine an optimal value to use
3131
3132 Notes:
3133 See MatLUFactorSymbolic() for the nonsymmetric case. See also
3134 MatCholeskyFactor() and MatCholeskyFactorNumeric().
3135
3136 Most users should employ the simplified KSP interface for linear solvers
3137 instead of working directly with matrix algebra routines such as this.
3138 See, e.g., KSPCreate().
3139
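 Example of Usage (a sketch; A, b, x are hypothetical, with A symmetric):
.vb
   Mat F;
   IS  perm, iperm;
   MatGetOrdering(A,MATORDERINGND,&perm,&iperm);
   MatGetFactor(A,MATSOLVERPETSC,MAT_FACTOR_CHOLESKY,&F);
   MatCholeskyFactorSymbolic(F,A,perm,NULL);
   MatCholeskyFactorNumeric(F,A,NULL);
   MatSolve(F,b,x);
.ve
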
3140 Level: developer
3141
3142.seealso: MatLUFactorSymbolic(), MatCholeskyFactor(), MatCholeskyFactorNumeric()
3143 MatGetOrdering()
3144
3145 Developer Note: the Fortran interface is not autogenerated as the F90
3146 interface definition cannot be generated correctly [due to MatFactorInfo]
3147
3148@*/
3149PetscErrorCode MatCholeskyFactorSymbolic(Mat fact,Mat mat,IS perm,const MatFactorInfo *info)
3150{
3151 PetscErrorCode ierr;
3152 MatFactorInfo tinfo;
3153
3154 PetscFunctionBegin;
3155 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3156 PetscValidType(mat,1);
3157 if (perm) PetscValidHeaderSpecific(perm,IS_CLASSID,2);
3158 if (info) PetscValidPointer(info,3);
3159 PetscValidPointer(fact,4);
3160 if (mat->rmap->N != mat->cmap->N) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONG,"Matrix must be square");
3161 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
3162 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3163 if (!(fact)->ops->choleskyfactorsymbolic) {
3164 MatSolverType spackage;
3165 ierr = MatFactorGetSolverType(fact,&spackage);CHKERRQ(ierr);
3166 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s symbolic factor Cholesky using solver package %s",((PetscObject)mat)->type_name,spackage);
3167 }
3168 MatCheckPreallocated(mat,2);
3169 if (!info) {
3170 ierr = MatFactorInfoInitialize(&tinfo);CHKERRQ(ierr);
3171 info = &tinfo;
3172 }
3173
3174 ierr = PetscLogEventBegin(MAT_CholeskyFactorSymbolic,mat,perm,0,0);CHKERRQ(ierr);
3175 ierr = (fact->ops->choleskyfactorsymbolic)(fact,mat,perm,info);CHKERRQ(ierr);
3176 ierr = PetscLogEventEnd(MAT_CholeskyFactorSymbolic,mat,perm,0,0);CHKERRQ(ierr);
3177 ierr = PetscObjectStateIncrease((PetscObject)fact);CHKERRQ(ierr);
3178 PetscFunctionReturn(0);
3179}
3180
3181/*@C
3182 MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3183 of a symmetric matrix. Call this routine after first calling
3184 MatCholeskyFactorSymbolic().
3185
3186 Collective on Mat
3187
3188 Input Parameters:
3189+ fact - the factor matrix obtained with MatGetFactor(), holding the symbolic factor of mat computed with MatCholeskyFactorSymbolic()
3190. mat - the initial matrix
3191- info - options for factorization
3192
3193
3195 Notes:
3196 Most users should employ the simplified KSP interface for linear solvers
3197 instead of working directly with matrix algebra routines such as this.
3198 See, e.g., KSPCreate().
3199
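 Example usage (a minimal sketch; assumes fact already holds the symbolic factor
 of A from MatCholeskyFactorSymbolic(), and that b and x are conforming vectors):
.vb
   MatCholeskyFactorNumeric(fact,A,&info);
   MatSolve(fact,b,x);
.ve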
3200 Level: developer
3201
3202.seealso: MatCholeskyFactorSymbolic(), MatCholeskyFactor(), MatLUFactorNumeric()
3203
3204 Developer Note: the Fortran interface is not autogenerated as the f90
3205 interface definition cannot be generated correctly [due to MatFactorInfo]
3206
3207@*/
3208PetscErrorCode MatCholeskyFactorNumeric(Mat fact,Mat mat,const MatFactorInfo *info)
3209{
3210 MatFactorInfo tinfo;
3211 PetscErrorCode ierr;
3212
3213 PetscFunctionBegin;
3214 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3215 PetscValidType(mat,1);
3216 PetscValidPointer(fact,2);
3217 PetscValidHeaderSpecific(fact,MAT_CLASSID,2);
3218 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
3219 if (!(fact)->ops->choleskyfactornumeric) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s numeric factor Cholesky",((PetscObject)mat)->type_name);
3220 if (mat->rmap->N != (fact)->rmap->N || mat->cmap->N != (fact)->cmap->N) SETERRQ4(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Mat fact: global dim %D should = %D %D should = %D",mat->rmap->N,(fact)->rmap->N,mat->cmap->N,(fact)->cmap->N);
3221 MatCheckPreallocated(mat,2);
3222 if (!info) {
3223 ierr = MatFactorInfoInitialize(&tinfo);CHKERRQ(ierr);
3224 info = &tinfo;
3225 }
3226
3227 ierr = PetscLogEventBegin(MAT_CholeskyFactorNumeric,mat,fact,0,0);CHKERRQ(ierr);
3228 ierr = (fact->ops->choleskyfactornumeric)(fact,mat,info);CHKERRQ(ierr);
3229 ierr = PetscLogEventEnd(MAT_CholeskyFactorNumeric,mat,fact,0,0);CHKERRQ(ierr);
3230 ierr = MatViewFromOptions(fact,NULL,"-mat_factor_view");CHKERRQ(ierr);
3231 ierr = PetscObjectStateIncrease((PetscObject)fact);CHKERRQ(ierr);
3232 PetscFunctionReturn(0);
3233}
3234
3235/* ----------------------------------------------------------------*/
3236/*@
3237 MatSolve - Solves A x = b, given a factored matrix.
3238
3239 Neighbor-wise Collective on Mat
3240
3241 Input Parameters:
3242+ mat - the factored matrix
3243- b - the right-hand-side vector
3244
3245 Output Parameter:
3246. x - the result vector
3247
3248 Notes:
3249 The vectors b and x cannot be the same, i.e., one cannot
3250 call MatSolve(A,x,x).
3251
3253 Most users should employ the simplified KSP interface for linear solvers
3254 instead of working directly with matrix algebra routines such as this.
3255 See, e.g., KSPCreate().
3256
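 Example usage (a minimal sketch; assumes F is a factored matrix obtained via
 MatGetFactor() and the factorization routines above, or MatLUFactor()/
 MatCholeskyFactor(), and that b is a conforming right-hand-side vector):
.vb
   Vec x;
   VecDuplicate(b,&x);
   MatSolve(F,b,x);
.ve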
3257 Level: developer
3258
3259.seealso: MatSolveAdd(), MatSolveTranspose(), MatSolveTransposeAdd()
3260@*/
3261PetscErrorCode MatSolve(Mat mat,Vec b,Vec x)
3262{
3263 PetscErrorCode ierr;
3264
3265 PetscFunctionBegin;
3266 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3267 PetscValidType(mat,1);
3268 PetscValidHeaderSpecific(b,VEC_CLASSID,2);
3269 PetscValidHeaderSpecific(x,VEC_CLASSID,3);
3270 PetscCheckSameComm(mat,1,b,2);
3271 PetscCheckSameComm(mat,1,x,3);
3272 if (x == b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors");
3273 if (mat->cmap->N != x->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map->N);
3274 if (mat->rmap->N != b->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map->N);
3275 if (mat->rmap->n != b->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: local dim %D %D",mat->rmap->n,b->map->n);
3276 if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0);
3277 if (!mat->ops->solve) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
3278 MatCheckPreallocated(mat,1);
3279
3280 ierr = PetscLogEventBegin(MAT_Solve,mat,b,x,0);CHKERRQ(ierr);
3281 if (mat->factorerrortype) {
3282 ierr = PetscInfo1(mat,"MatFactorError %D\n",mat->factorerrortype);CHKERRQ(ierr);
3283 ierr = VecSetInf(x);CHKERRQ(ierr);
3284 } else {
3285 if (!mat->ops->solve) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
3286 ierr = (*mat->ops->solve)(mat,b,x);CHKERRQ(ierr);
3287 }
3288 ierr = PetscLogEventEnd(MAT_Solve,mat,b,x,0);CHKERRQ(ierr);
3289 ierr = PetscObjectStateIncrease((PetscObject)x);CHKERRQ(ierr);
3290 PetscFunctionReturn(0);
3291}
3292
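/* Fallback used by MatMatSolve()/MatMatSolveTranspose() when a matrix type supplies
   no native kernel: it solves column by column, wrapping each column of the dense
   arrays of B and X in work vectors via VecPlaceArray()/VecResetArray(). */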
3293static PetscErrorCode MatMatSolve_Basic(Mat A,Mat B,Mat X, PetscBool trans)
3294{
3295 PetscErrorCode ierr;
3296 Vec b,x;
3297 PetscInt m,N,i;
3298 PetscScalar *bb,*xx;
3299
3300 PetscFunctionBegin;
3301 ierr = MatDenseGetArray(B,&bb);CHKERRQ(ierr);
3302 ierr = MatDenseGetArray(X,&xx);CHKERRQ(ierr);
3303 ierr = MatGetLocalSize(B,&m,NULL);CHKERRQ(ierr); /* number local rows */
3304 ierr = MatGetSize(B,NULL,&N);CHKERRQ(ierr); /* total columns in dense matrix */
3305 ierr = MatCreateVecs(A,&x,&b);CHKERRQ(ierr);
3306 for (i=0; i<N; i++) {
3307 ierr = VecPlaceArray(b,bb + i*m);CHKERRQ(ierr);
3308 ierr = VecPlaceArray(x,xx + i*m);CHKERRQ(ierr);
3309 if (trans) {
3310 ierr = MatSolveTranspose(A,b,x);CHKERRQ(ierr);
3311 } else {
3312 ierr = MatSolve(A,b,x);CHKERRQ(ierr);
3313 }
3314 ierr = VecResetArray(x);CHKERRQ(ierr);
3315 ierr = VecResetArray(b);CHKERRQ(ierr);
3316 }
3317 ierr = VecDestroy(&b);CHKERRQ(ierr);
3318 ierr = VecDestroy(&x);CHKERRQ(ierr);
3319 ierr = MatDenseRestoreArray(B,&bb);CHKERRQ(ierr);
3320 ierr = MatDenseRestoreArray(X,&xx);CHKERRQ(ierr);
3321 PetscFunctionReturn(0);
3322}
3323
3324/*@
3325 MatMatSolve - Solves A X = B, given a factored matrix.
3326
3327 Neighbor-wise Collective on Mat
3328
3329 Input Parameters:
3330+ A - the factored matrix
3331- B - the right-hand-side matrix (dense matrix)
3332
3333 Output Parameter:
3334. X - the result matrix (dense matrix)
3335
3336 Notes:
3337 The matrices B and X cannot be the same, i.e., one cannot
3338 call MatMatSolve(A,X,X).
3339
3341 Most users should employ the simplified KSP interface for linear solvers
3342 instead of working directly with matrix algebra routines such as this.
3343 See, e.g., KSPCreate(). However, KSP can only solve for one vector (column of X)
3344 at a time.
3345
3346 When using SuperLU_Dist as a parallel solver, PETSc will use the SuperLU_Dist functionality to solve multiple right hand sides simultaneously. For MUMPS
3347 it calls a separate solve for each right hand side, since MUMPS does not yet support distributed right hand sides.
3348
3349 Since the resulting matrix X must always be dense, we do not support a sparse representation of the matrix B.
3350
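 Example usage (a minimal sketch; assumes F is a factored matrix and B is a
 conforming dense right-hand-side matrix):
.vb
   Mat X;
   MatDuplicate(B,MAT_DO_NOT_COPY_VALUES,&X);
   MatMatSolve(F,B,X);
.ve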
3351 Level: developer
3352
3353.seealso: MatMatSolveTranspose(), MatLUFactor(), MatCholeskyFactor()
3354@*/
3355PetscErrorCode MatMatSolve(Mat A,Mat B,Mat X)
3356{
3357 PetscErrorCode ierr;
3358
3359 PetscFunctionBegin;
3360 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
3361 PetscValidType(A,1);
3362 PetscValidHeaderSpecific(B,MAT_CLASSID,2);
3363 PetscValidHeaderSpecific(X,MAT_CLASSID,3);
3364 PetscCheckSameComm(A,1,B,2);
3365 PetscCheckSameComm(A,1,X,3);
3366 if (X == B) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_IDN,"X and B must be different matrices");
3367 if (A->cmap->N != X->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat X: global dim %D %D",A->cmap->N,X->rmap->N);
3368 if (A->rmap->N != B->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat B: global dim %D %D",A->rmap->N,B->rmap->N);
3369 if (X->cmap->N < B->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Solution matrix must have same number of columns as rhs matrix");
3370 if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(0);
3371 if (!A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix");
3372 MatCheckPreallocated(A,1);
3373
3374 ierr = PetscLogEventBegin(MAT_MatSolve,A,B,X,0);CHKERRQ(ierr);
3375 if (!A->ops->matsolve) {
3376 ierr = PetscInfo1(A,"Mat type %s using basic MatMatSolve\n",((PetscObject)A)->type_name);CHKERRQ(ierr);
3377 ierr = MatMatSolve_Basic(A,B,X,PETSC_FALSE);CHKERRQ(ierr);
3378 } else {
3379 ierr = (*A->ops->matsolve)(A,B,X);CHKERRQ(ierr);
3380 }
3381 ierr = PetscLogEventEnd(MAT_MatSolve,A,B,X,0);CHKERRQ(ierr);
3382 ierr = PetscObjectStateIncrease((PetscObject)X);CHKERRQ(ierr);
3383 PetscFunctionReturn(0);
3384}
3385
3386/*@
3387 MatMatSolveTranspose - Solves A^T X = B, given a factored matrix.
3388
3389 Neighbor-wise Collective on Mat
3390
3391 Input Parameters:
3392+ A - the factored matrix
3393- B - the right-hand-side matrix (dense matrix)
3394
3395 Output Parameter:
3396. X - the result matrix (dense matrix)
3397
3398 Notes:
3399 The matrices B and X cannot be the same, i.e., one cannot
3400 call MatMatSolveTranspose(A,X,X).
3401
3403 Most users should employ the simplified KSP interface for linear solvers
3404 instead of working directly with matrix algebra routines such as this.
3405 See, e.g., KSPCreate(). However, KSP can only solve for one vector (column of X)
3406 at a time.
3407
3408 When using SuperLU_Dist or MUMPS as a parallel solver, PETSc will use their functionality to solve multiple right hand sides simultaneously.
3409
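 Example usage (a minimal sketch; assumes F is a factored matrix and B, X are
 conforming dense matrices, so that X ends up holding the solution of A^T X = B):
.vb
   MatMatSolveTranspose(F,B,X);
.ve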
3410 Level: developer
3411
3412.seealso: MatMatSolve(), MatLUFactor(), MatCholeskyFactor()
3413@*/
3414PetscErrorCode MatMatSolveTranspose(Mat A,Mat B,Mat X)
3415{
3416 PetscErrorCode ierr;
3417
3418 PetscFunctionBegin;
3419 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
3420 PetscValidType(A,1);
3421 PetscValidHeaderSpecific(B,MAT_CLASSID,2);
3422 PetscValidHeaderSpecific(X,MAT_CLASSID,3);
3423 PetscCheckSameComm(A,1,B,2);
3424 PetscCheckSameComm(A,1,X,3);
3425 if (X == B) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_IDN,"X and B must be different matrices");
3426 if (A->cmap->N != X->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat X: global dim %D %D",A->cmap->N,X->rmap->N);
3427 if (A->rmap->N != B->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat B: global dim %D %D",A->rmap->N,B->rmap->N);
3428 if (A->rmap->n != B->rmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat A,Mat B: local dim %D %D",A->rmap->n,B->rmap->n);
3429 if (X->cmap->N < B->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Solution matrix must have same number of columns as rhs matrix");
3430 if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(0);
3431 if (!A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix");
3432 MatCheckPreallocated(A,1);
3433
3434 ierr = PetscLogEventBegin(MAT_MatSolve,A,B,X,0);CHKERRQ(ierr);
3435 if (!A->ops->matsolvetranspose) {
3436 ierr = PetscInfo1(A,"Mat type %s using basic MatMatSolveTranspose\n",((PetscObject)A)->type_name);CHKERRQ(ierr);
3437 ierr = MatMatSolve_Basic(A,B,X,PETSC_TRUE);CHKERRQ(ierr);
3438 } else {
3439 ierr = (*A->ops->matsolvetranspose)(A,B,X);CHKERRQ(ierr);
3440 }
3441 ierr = PetscLogEventEnd(MAT_MatSolve,A,B,X,0);CHKERRQ(ierr);
3442 ierr = PetscObjectStateIncrease((PetscObject)X);CHKERRQ(ierr);
3443 PetscFunctionReturn(0);
3444}
3445
3446/*@
3447 MatMatTransposeSolve - Solves A X = B^T, given a factored matrix.
3448
3449 Neighbor-wise Collective on Mat
3450
3451 Input Parameters:
3452+ A - the factored matrix
3453- Bt - the transpose of the right-hand-side matrix
3454
3455 Output Parameter:
3456. X - the result matrix (dense matrix)
3457
3458 Notes:
3459 Most users should employ the simplified KSP interface for linear solvers
3460 instead of working directly with matrix algebra routines such as this.
3461 See, e.g., KSPCreate(). However, KSP can only solve for one vector (column of X)
3462 at a time.
3463
3464 For MUMPS, only a centralized sparse compressed column format on the host processor is supported for the right-hand-side matrix. The user must create B^T in sparse compressed row format on the host processor and call MatMatTransposeSolve() to obtain the effect of MatMatSolve() with MUMPS.
3465
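 Example usage (a minimal sketch; assumes F is a factored matrix, Bt holds the
 transpose of the right-hand-side matrix, and X is a conforming dense matrix):
.vb
   MatMatTransposeSolve(F,Bt,X);
.ve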
3466 Level: developer
3467
3468.seealso: MatMatSolve(), MatMatSolveTranspose(), MatLUFactor(), MatCholeskyFactor()
3469@*/
3470PetscErrorCode MatMatTransposeSolve(Mat A,Mat Bt,Mat X)
3471{
3472 PetscErrorCode ierr;
3473
3474 PetscFunctionBegin;
3475 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
3476 PetscValidType(A,1);
3477 PetscValidHeaderSpecific(Bt,MAT_CLASSID,2);
3478 PetscValidHeaderSpecific(X,MAT_CLASSID,3);
3479 PetscCheckSameComm(A,1,Bt,2);
3480 PetscCheckSameComm(A,1,X,3)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)A),PetscObjectComm((PetscObject
)X),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3480,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),3480,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,3,_7_flag); } while (0)
;
3481
3482 if (X == Bt) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_IDN,"X and Bt must be different matrices")return PetscError(PetscObjectComm((PetscObject)A),3482,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",61,PETSC_ERROR_INITIAL
,"X and Bt must be different matrices")
;
3483 if (A->cmap->N != X->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat X: global dim %D %D",A->cmap->N,X->rmap->N)return PetscError(PetscObjectComm((PetscObject)A),3483,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat A,Mat X: global dim %D %D",A->cmap->N,X->rmap->
N)
;
3484 if (A->rmap->N != Bt->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat Bt: global dim %D %D",A->rmap->N,Bt->cmap->N)return PetscError(PetscObjectComm((PetscObject)A),3484,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat A,Mat Bt: global dim %D %D",A->rmap->N,Bt->cmap
->N)
;
3485 if (X->cmap->N < Bt->rmap->N) SETERRQ(PetscObjectComm((PetscObject)X),PETSC_ERR_ARG_SIZ,"Solution matrix must have at least as many columns as the rhs matrix has rows")return PetscError(PetscObjectComm((PetscObject)X),3485,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Solution matrix must have at least as many columns as the rhs matrix has rows"
)
;
3486 if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3487 if (!A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix")return PetscError(PetscObjectComm((PetscObject)A),3487,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Unfactored matrix")
;
3488 MatCheckPreallocated(A,1)do { if (__builtin_expect(!!(!(A)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),3488,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"A",__func__); } while (0)
;
3489
3490 if (!A->ops->mattransposesolve) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type %s",((PetscObject)A)->type_name)return PetscError(PetscObjectComm((PetscObject)A),3490,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)A)->type_name)
;
3491 ierr = PetscLogEventBegin(MAT_MatTrSolve,A,Bt,X,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTrSolve].active) ? (*PetscLogPLB)((MAT_MatTrSolve),0,
(PetscObject)(A),(PetscObject)(Bt),(PetscObject)(X),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3491,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3492 ierr = (*A->ops->mattransposesolve)(A,Bt,X);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3492,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3493 ierr = PetscLogEventEnd(MAT_MatTrSolve,A,Bt,X,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTrSolve].active) ? (*PetscLogPLE)((MAT_MatTrSolve),0,
(PetscObject)(A),(PetscObject)(Bt),(PetscObject)(X),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3493,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3494 ierr = PetscObjectStateIncrease((PetscObject)X)(((PetscObject)X)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3494,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3495 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3496}
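/*
   Illustrative sketch (not part of the original source): using MatMatTransposeSolve() with a
   MUMPS LU factorization, per the MUMPS note above. It assumes A and the sparse Bt (stored on
   the host processor) are assembled, X is a suitably sized dense matrix, error checking is
   elided, and that NULL orderings are acceptable since MUMPS orders internally.
.vb
   Mat           F;
   MatFactorInfo info;

   MatFactorInfoInitialize(&info);
   MatGetFactor(A,MATSOLVERMUMPS,MAT_FACTOR_LU,&F);
   MatLUFactorSymbolic(F,A,NULL,NULL,&info);  /* NULL orderings: MUMPS computes its own */
   MatLUFactorNumeric(F,A,&info);
   MatMatTransposeSolve(F,Bt,X);              /* solves A X = Bt^T */
   MatDestroy(&F);
.ve
*/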
3497
3498/*@
3499 MatForwardSolve - Solves L x = b, given a factored matrix A = LU, or
3500 U^T*D^(1/2) x = b, given a factored symmetric matrix A = U^T*D*U.
3501
3502 Neighbor-wise Collective on Mat
3503
3504 Input Parameters:
3505+ mat - the factored matrix
3506- b - the right-hand-side vector
3507
3508 Output Parameter:
3509. x - the result vector
3510
3511 Notes:
3512 MatSolve() should be used for most applications, as it performs
3513 a forward solve followed by a backward solve.
3514
3515 The vectors b and x cannot be the same, i.e., one cannot
3516 call MatForwardSolve(A,x,x).
3517
3518 For matrices in seqsbaij format with block size larger than 1,
3519 the diagonal blocks are not yet split as D = D^(1/2) * D^(1/2).
3520 In that case MatForwardSolve() solves U^T*D y = b, and
3521 MatBackwardSolve() solves U x = y.
3522 Thus the two stages do not provide a symmetric preconditioner.
3523
3524 Most users should employ the simplified KSP interface for linear solvers
3525 instead of working directly with matrix algebra routines such as this.
3526 See, e.g., KSPCreate().
3527
3528 Level: developer
3529
3530.seealso: MatSolve(), MatBackwardSolve()
3531@*/
3532PetscErrorCode MatForwardSolve(Mat mat,Vec b,Vec x)
3533{
3534 PetscErrorCode ierr;
3535
3536 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 3536; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
3537 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),3537,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),3537,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),3537,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),3537,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
3538 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),3538,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
3539 PetscValidHeaderSpecific(b,VEC_CLASSID,2)do { if (!b) return PetscError(((MPI_Comm)0x44000001),3539,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(b,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3539,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(b))->classid != VEC_CLASSID) { if (
((PetscObject)(b))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3539,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),3539,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
3540 PetscValidHeaderSpecific(x,VEC_CLASSID,3)do { if (!x) return PetscError(((MPI_Comm)0x44000001),3540,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3540,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3540,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),3540,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
3541 PetscCheckSameComm(mat,1,b,2)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)b),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3541,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),3541,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,_7_flag); } while (0)
;
3542 PetscCheckSameComm(mat,1,x,3)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)x),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3542,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),3542,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,3,_7_flag); } while (0)
;
3543 if (x == b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),3543,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",61,PETSC_ERROR_INITIAL
,"x and b must be different vectors")
;
3544 if (mat->cmap->N != x->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3544,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map
->N)
;
3545 if (mat->rmap->N != b->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3545,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map
->N)
;
3546 if (mat->rmap->n != b->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: local dim %D %D",mat->rmap->n,b->map->n)return PetscError(((MPI_Comm)0x44000001),3546,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec b: local dim %D %D",mat->
rmap->n,b->map->n)
;
3547 if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3548 if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix")return PetscError(PetscObjectComm((PetscObject)mat),3548,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Unfactored matrix")
;
3549 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),3549,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
3550
3551 if (!mat->ops->forwardsolve) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),3551,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
3552 ierr = PetscLogEventBegin(MAT_ForwardSolve,mat,b,x,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_ForwardSolve].active) ? (*PetscLogPLB)((MAT_ForwardSolve
),0,(PetscObject)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3552,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3553 ierr = (*mat->ops->forwardsolve)(mat,b,x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3553,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3554 ierr = PetscLogEventEnd(MAT_ForwardSolve,mat,b,x,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_ForwardSolve].active) ? (*PetscLogPLE)((MAT_ForwardSolve
),0,(PetscObject)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3554,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3555 ierr = PetscObjectStateIncrease((PetscObject)x)(((PetscObject)x)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3555,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3556 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3557}
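/*
   Illustrative sketch (not part of the original source): performing the two triangular stages
   explicitly on a factored matrix F (obtained in-place via MatLUFactor(), or via the
   MatGetFactor()/MatLUFactorSymbolic()/MatLUFactorNumeric() sequence); y is a work vector
   assumed to have been created with VecDuplicate(). All names are hypothetical.
.vb
   MatForwardSolve(F,b,y);    /* first stage:  L y = b */
   MatBackwardSolve(F,y,x);   /* second stage: U x = y */
   /* x now matches the result of the single call MatSolve(F,b,x) */
.ve
*/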
3558
3559/*@
3560 MatBackwardSolve - Solves U x = b, given a factored matrix A = LU, or
3561 D^(1/2) U x = b, given a factored symmetric matrix A = U^T*D*U.
3562
3563 Neighbor-wise Collective on Mat
3564
3565 Input Parameters:
3566+ mat - the factored matrix
3567- b - the right-hand-side vector
3568
3569 Output Parameter:
3570. x - the result vector
3571
3572 Notes:
3573 MatSolve() should be used for most applications, as it performs
3574 a forward solve followed by a backward solve.
3575
3576 The vectors b and x cannot be the same, i.e., one cannot
3577 call MatBackwardSolve(A,x,x).
3578
3579 For matrices in seqsbaij format with block size larger than 1,
3580 the diagonal blocks are not yet split as D = D^(1/2) * D^(1/2).
3581 In that case MatForwardSolve() solves U^T*D y = b, and
3582 MatBackwardSolve() solves U x = y.
3583 Thus the two stages do not provide a symmetric preconditioner.
3584
3585 Most users should employ the simplified KSP interface for linear solvers
3586 instead of working directly with matrix algebra routines such as this.
3587 See, e.g., KSPCreate().
3588
3589 Level: developer
3590
3591.seealso: MatSolve(), MatForwardSolve()
3592@*/
3593PetscErrorCode MatBackwardSolve(Mat mat,Vec b,Vec x)
3594{
3595 PetscErrorCode ierr;
3596
3597 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 3597; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
3598 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),3598,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),3598,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),3598,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),3598,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
3599 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),3599,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
3600 PetscValidHeaderSpecific(b,VEC_CLASSID,2)do { if (!b) return PetscError(((MPI_Comm)0x44000001),3600,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(b,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3600,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(b))->classid != VEC_CLASSID) { if (
((PetscObject)(b))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3600,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),3600,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
3601 PetscValidHeaderSpecific(x,VEC_CLASSID,3)do { if (!x) return PetscError(((MPI_Comm)0x44000001),3601,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3601,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3601,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),3601,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
3602 PetscCheckSameComm(mat,1,b,2)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)b),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3602,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),3602,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,_7_flag); } while (0)
;
3603 PetscCheckSameComm(mat,1,x,3)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)x),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3603,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),3603,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,3,_7_flag); } while (0)
;
3604 if (x == b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),3604,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",61,PETSC_ERROR_INITIAL
,"x and b must be different vectors")
;
3605 if (mat->cmap->N != x->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3605,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map
->N)
;
3606 if (mat->rmap->N != b->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3606,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map
->N)
;
3607 if (mat->rmap->n != b->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: local dim %D %D",mat->rmap->n,b->map->n)return PetscError(((MPI_Comm)0x44000001),3607,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec b: local dim %D %D",mat->
rmap->n,b->map->n)
;
3608 if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3609 if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix")return PetscError(PetscObjectComm((PetscObject)mat),3609,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Unfactored matrix")
;
3610 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),3610,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
3611
3612 if (!mat->ops->backwardsolve) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),3612,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
3613 ierr = PetscLogEventBegin(MAT_BackwardSolve,mat,b,x,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_BackwardSolve].active) ? (*PetscLogPLB)((MAT_BackwardSolve
),0,(PetscObject)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3613,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3614 ierr = (*mat->ops->backwardsolve)(mat,b,x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3614,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3615 ierr = PetscLogEventEnd(MAT_BackwardSolve,mat,b,x,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_BackwardSolve].active) ? (*PetscLogPLE)((MAT_BackwardSolve
),0,(PetscObject)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3615,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3616 ierr = PetscObjectStateIncrease((PetscObject)x)(((PetscObject)x)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3616,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3617 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3618}
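/*
   Illustrative sketch (not part of the original source): the same two-stage pattern after an
   in-place Cholesky factorization of a symmetric matrix (A = U^T*D*U). As the notes above state,
   for sbaij with block size > 1 the stages are U^T*D y = b followed by U x = y, so the split is
   not symmetric. Here perm is an IS from MatGetOrdering() and info a MatFactorInfo, both assumed
   to have been set up beforehand.
.vb
   MatCholeskyFactor(A,perm,&info);  /* in-place factorization, A = U^T*D*U */
   MatForwardSolve(A,b,y);           /* U^T*D y = b */
   MatBackwardSolve(A,y,x);          /* U x = y     */
.ve
*/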
3619
3620/*@
3621 MatSolveAdd - Computes x = y + inv(A)*b, given a factored matrix.
3622
3623 Neighbor-wise Collective on Mat
3624
3625 Input Parameters:
3626+ mat - the factored matrix
3627. b - the right-hand-side vector
3628- y - the vector to be added to
3629
3630 Output Parameter:
3631. x - the result vector
3632
3633 Notes:
3634 The vectors b and x cannot be the same, i.e., one cannot
3635 call MatSolveAdd(A,x,y,x).
3636
3637 Most users should employ the simplified KSP interface for linear solvers
3638 instead of working directly with matrix algebra routines such as this.
3639 See, e.g., KSPCreate().
3640
3641 Level: developer
3642
3643.seealso: MatSolve(), MatSolveTranspose(), MatSolveTransposeAdd()
3644@*/
3645PetscErrorCode MatSolveAdd(Mat mat,Vec b,Vec y,Vec x)
3646{
3647 PetscScalar one = 1.0;
3648 Vec tmp;
3649 PetscErrorCode ierr;
3650
3651 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 3651; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
3652 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),3652,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),3652,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),3652,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),3652,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
3653 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),3653,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
3654 PetscValidHeaderSpecific(y,VEC_CLASSID,3)do { if (!y) return PetscError(((MPI_Comm)0x44000001),3654,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(y,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3654,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(y))->classid != VEC_CLASSID) { if (
((PetscObject)(y))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3654,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),3654,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
3655 PetscValidHeaderSpecific(b,VEC_CLASSID,2)do { if (!b) return PetscError(((MPI_Comm)0x44000001),3655,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(b,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3655,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(b))->classid != VEC_CLASSID) { if (
((PetscObject)(b))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3655,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),3655,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
3656 PetscValidHeaderSpecific(x,VEC_CLASSID,4)do { if (!x) return PetscError(((MPI_Comm)0x44000001),3656,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",4); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3656,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,4); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3656,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,4); else return PetscError(((MPI_Comm)0x44000001),3656,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",4); } } while (0)
;
3657 PetscCheckSameComm(mat,1,b,2)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)b),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3657,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),3657,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,_7_flag); } while (0)
;
3658 PetscCheckSameComm(mat,1,y,3)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)y),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3658,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),3658,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,3,_7_flag); } while (0)
;
3659 PetscCheckSameComm(mat,1,x,4)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)x),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3659,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),3659,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,4,_7_flag); } while (0)
;
3660 if (x == b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),3660,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",61,PETSC_ERROR_INITIAL
,"x and b must be different vectors")
;
3661 if (mat->cmap->N != x->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3661,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map
->N)
;
3662 if (mat->rmap->N != b->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3662,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map
->N)
;
3663 if (mat->rmap->N != y->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %D %D",mat->rmap->N,y->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3663,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec y: global dim %D %D",mat->rmap->N,y->map
->N)
;
3664 if (mat->rmap->n != b->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: local dim %D %D",mat->rmap->n,b->map->n)return PetscError(((MPI_Comm)0x44000001),3664,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Mat mat,Vec b: local dim %D %D",mat->
rmap->n,b->map->n)
;
3665 if (x->map->n != y->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Vec x,Vec y: local dim %D %D",x->map->n,y->map->n)return PetscError(((MPI_Comm)0x44000001),3665,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Vec x,Vec y: local dim %D %D",x->
map->n,y->map->n)
;
3666 if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3667 if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix")return PetscError(PetscObjectComm((PetscObject)mat),3667,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Unfactored matrix")
;
3668 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),3668,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
3669
3670 ierr = PetscLogEventBegin(MAT_SolveAdd,mat,b,x,y)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_SolveAdd].active) ? (*PetscLogPLB)((MAT_SolveAdd),0,(PetscObject
)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject)(y)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3670,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3671 if (mat->ops->solveadd) {
3672 ierr = (*mat->ops->solveadd)(mat,b,y,x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3672,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3673 } else {
3674 /* do the solve then the add manually */
3675 if (x != y) {
3676 ierr = MatSolve(mat,b,x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3676,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3677 ierr = VecAXPY(x,one,y);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3677,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3678 } else {
3679 ierr = VecDuplicate(x,&tmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3679,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3680 ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)tmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3680,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3681 ierr = VecCopy(x,tmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3681,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3682 ierr = MatSolve(mat,b,x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3682,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3683 ierr = VecAXPY(x,one,tmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3683,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3684 ierr = VecDestroy(&tmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3684,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3685 }
3686 }
3687 ierr = PetscLogEventEnd(MAT_SolveAdd,mat,b,x,y)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_SolveAdd].active) ? (*PetscLogPLE)((MAT_SolveAdd),0,(PetscObject
)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject)(y)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3687,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3688 ierr = PetscObjectStateIncrease((PetscObject)x)(((PetscObject)x)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3688,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3689 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3690}
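/*
   Illustrative sketch (not part of the original source): accumulating a solve into an existing
   vector with a factored matrix F (hypothetical name). When a matrix type provides no dedicated
   solveadd kernel, the routine above falls back to an explicit solve followed by an axpy, which
   user code could also write directly as shown.
.vb
   MatSolveAdd(F,b,y,x);   /* x = y + inv(A)*b */

   /* equivalent two-step form (for x != y), mirroring the fallback above */
   MatSolve(F,b,x);
   VecAXPY(x,1.0,y);
.ve
*/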
3691
3692/*@
3693 MatSolveTranspose - Solves A' x = b, given a factored matrix.
3694
3695 Neighbor-wise Collective on Mat
3696
3697 Input Parameters:
3698+ mat - the factored matrix
3699- b - the right-hand-side vector
3700
3701 Output Parameter:
3702. x - the result vector
3703
3704 Notes:
3705 The vectors b and x cannot be the same, i.e., one cannot
3706 call MatSolveTranspose(A,x,x).
3707
3708 Most users should employ the simplified KSP interface for linear solvers
3709 instead of working directly with matrix algebra routines such as this.
3710 See, e.g., KSPCreate().
3711
3712 Level: developer
3713
3714.seealso: MatSolve(), MatSolveAdd(), MatSolveTransposeAdd()
3715@*/
3716PetscErrorCode MatSolveTranspose(Mat mat,Vec b,Vec x)
3717{
3718 PetscErrorCode ierr;
3719
3720 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 3720; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
3721 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),3721,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),3721,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),3721,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),3721,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
3722 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),3722,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
3723 PetscValidHeaderSpecific(b,VEC_CLASSID,2)do { if (!b) return PetscError(((MPI_Comm)0x44000001),3723,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(b,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3723,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(b))->classid != VEC_CLASSID) { if (
((PetscObject)(b))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3723,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),3723,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
3724 PetscValidHeaderSpecific(x,VEC_CLASSID,3)do { if (!x) return PetscError(((MPI_Comm)0x44000001),3724,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),3724,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),3724,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),3724,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
3725 PetscCheckSameComm(mat,1,b,2)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)b),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3725,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),3725,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,_7_flag); } while (0)
;
3726 PetscCheckSameComm(mat,1,x,3)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)x),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),3726,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),3726,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,3,_7_flag); } while (0)
;
3727 if (x == b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors")return PetscError(PetscObjectComm((PetscObject)mat),3727,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",61,PETSC_ERROR_INITIAL
,"x and b must be different vectors")
;
3728 if (mat->rmap->N != x->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->rmap->N,x->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3728,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec x: global dim %D %D",mat->rmap->N,x->map
->N)
;
3729 if (mat->cmap->N != b->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %D %D",mat->cmap->N,b->map->N)return PetscError(PetscObjectComm((PetscObject)mat),3729,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat mat,Vec b: global dim %D %D",mat->cmap->N,b->map
->N)
;
3730 if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
3731 if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix")return PetscError(PetscObjectComm((PetscObject)mat),3731,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Unfactored matrix")
;
3732 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),3732,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
3733 ierr = PetscLogEventBegin(MAT_SolveTranspose,mat,b,x,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_SolveTranspose].active) ? (*PetscLogPLB)((MAT_SolveTranspose
),0,(PetscObject)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3733,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3734 if (mat->factorerrortype) {
3735 ierr = PetscInfo1(mat,"MatFactorError %D\n",mat->factorerrortype)PetscInfo_Private(__func__,mat,"MatFactorError %D\n",mat->
factorerrortype)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3735,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3736 ierr = VecSetInf(x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3736,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3737 } else {
3738 if (!mat->ops->solvetranspose) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),3738,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Matrix type %s",((PetscObject)mat)->type_name)
;
3739 ierr = (*mat->ops->solvetranspose)(mat,b,x);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3739,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3740 }
3741 ierr = PetscLogEventEnd(MAT_SolveTranspose,mat,b,x,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_SolveTranspose].active) ? (*PetscLogPLE)((MAT_SolveTranspose
),0,(PetscObject)(mat),(PetscObject)(b),(PetscObject)(x),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3741,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3742 ierr = PetscObjectStateIncrease((PetscObject)x)(((PetscObject)x)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),3742,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
3743 PetscFunctionReturn(0);
3744}
3745
3746/*@
3747 MatSolveTransposeAdd - Computes x = y + inv(Transpose(A)) b, given a
3748 factored matrix.
3749
3750 Neighbor-wise Collective on Mat
3751
3752 Input Parameters:
3753+ mat - the factored matrix
3754. b - the right-hand-side vector
3755- y - the vector to be added to
3756
3757 Output Parameter:
3758. x - the result vector
3759
3760 Notes:
3761 The vectors b and x cannot be the same. I.e., one cannot
3762 call MatSolveTransposeAdd(A,x,y,x).
3763
3764 Most users should employ the simplified KSP interface for linear solvers
3765 instead of working directly with matrix algebra routines such as this.
3766 See, e.g., KSPCreate().
3767
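 Example of Usage:
 A minimal sketch (illustrative, not taken from this source): it assumes mat has
 already been factored, e.g. with MatLUFactorSymbolic() and MatLUFactorNumeric(),
 and that b, y, and x are conforming vectors.
.vb
 MatSolveTransposeAdd(mat,b,y,x);   /* x = y + inv(A^T) b */
.ve
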
3768 Level: developer
3769
3770.seealso: MatSolve(), MatSolveAdd(), MatSolveTranspose()
3771@*/
3772PetscErrorCode MatSolveTransposeAdd(Mat mat,Vec b,Vec y,Vec x)
3773{
3774 PetscScalar one = 1.0;
3775 PetscErrorCode ierr;
3776 Vec tmp;
3777
3778 PetscFunctionBegin;
3779 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3780 PetscValidType(mat,1);
3781 PetscValidHeaderSpecific(y,VEC_CLASSID,2);
3782 PetscValidHeaderSpecific(b,VEC_CLASSID,3);
3783 PetscValidHeaderSpecific(x,VEC_CLASSID,4);
3784 PetscCheckSameComm(mat,1,b,2);
3785 PetscCheckSameComm(mat,1,y,3);
3786 PetscCheckSameComm(mat,1,x,4);
3787 if (x == b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors");
3788 if (mat->rmap->N != x->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->rmap->N,x->map->N);
3789 if (mat->cmap->N != b->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %D %D",mat->cmap->N,b->map->N);
3790 if (mat->cmap->N != y->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %D %D",mat->cmap->N,y->map->N);
3791 if (x->map->n != y->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Vec x,Vec y: local dim %D %D",x->map->n,y->map->n);
3792 if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0);
3793 if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix");
3794 MatCheckPreallocated(mat,1);
3795
3796 ierr = PetscLogEventBegin(MAT_SolveTransposeAdd,mat,b,x,y);CHKERRQ(ierr);
3797 if (mat->ops->solvetransposeadd) {
3798 if (mat->factorerrortype) {
3799 ierr = PetscInfo1(mat,"MatFactorError %D\n",mat->factorerrortype);CHKERRQ(ierr);
3800 ierr = VecSetInf(x);CHKERRQ(ierr);
3801 } else {
3802 ierr = (*mat->ops->solvetransposeadd)(mat,b,y,x);CHKERRQ(ierr);
3803 }
3804 } else {
3805 /* do the solve then the add manually */
3806 if (x != y) {
3807 ierr = MatSolveTranspose(mat,b,x);CHKERRQ(ierr);
3808 ierr = VecAXPY(x,one,y);CHKERRQ(ierr);
3809 } else {
3810 ierr = VecDuplicate(x,&tmp);CHKERRQ(ierr);
3811 ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)tmp);CHKERRQ(ierr);
3812 ierr = VecCopy(x,tmp);CHKERRQ(ierr);
3813 ierr = MatSolveTranspose(mat,b,x);CHKERRQ(ierr);
3814 ierr = VecAXPY(x,one,tmp);CHKERRQ(ierr);
3815 ierr = VecDestroy(&tmp);CHKERRQ(ierr);
3816 }
3817 }
3818 ierr = PetscLogEventEnd(MAT_SolveTransposeAdd,mat,b,x,y);CHKERRQ(ierr);
3819 ierr = PetscObjectStateIncrease((PetscObject)x);CHKERRQ(ierr);
3820 PetscFunctionReturn(0);
3821}
3822/* ----------------------------------------------------------------*/
3823
3824/*@
3825 MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
3826
3827 Neighbor-wise Collective on Mat
3828
3829 Input Parameters:
3830+ mat - the matrix
3831. b - the right hand side
3832. omega - the relaxation factor
3833. flag - flag indicating the type of SOR (see below)
3834. shift - diagonal shift
3835. its - the number of iterations
3836- lits - the number of local iterations
3837
3838 Output Parameter:
3839. x - the solution (can contain an initial guess, use option SOR_ZERO_INITIAL_GUESS to indicate no guess)
3840
3841 SOR Flags:
3842+ SOR_FORWARD_SWEEP - forward SOR
3843. SOR_BACKWARD_SWEEP - backward SOR
3844. SOR_SYMMETRIC_SWEEP - SSOR (symmetric SOR)
3845. SOR_LOCAL_FORWARD_SWEEP - local forward SOR
3846. SOR_LOCAL_BACKWARD_SWEEP - local backward SOR
3847. SOR_LOCAL_SYMMETRIC_SWEEP - local SSOR
3848. SOR_APPLY_UPPER, SOR_APPLY_LOWER - applies
3849 upper/lower triangular part of matrix to
3850 vector (with omega)
3851- SOR_ZERO_INITIAL_GUESS - zero initial guess
3852
3853 Notes:
3854 SOR_LOCAL_FORWARD_SWEEP, SOR_LOCAL_BACKWARD_SWEEP, and
3855 SOR_LOCAL_SYMMETRIC_SWEEP perform separate independent smoothings
3856 on each processor.
3857
3858 Application programmers will not generally use MatSOR() directly,
3859 but instead will employ the KSP/PC interface.
3860
3861
3862 For BAIJ, SBAIJ, and AIJ matrices with Inodes this does a block SOR smoothing; otherwise it does a pointwise smoothing.
3863
3864 Notes for Advanced Users:
3865 The flags are implemented as bitwise inclusive or operations.
3866 For example, use (SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP)
3867 to specify a zero initial guess for SSOR.
3868
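 Example of Usage:
 A hedged sketch of one local SSOR sweep with a zero initial guess; the values of
 omega (1.0), shift (0.0), its, and lits below are illustrative choices only.
.vb
 MatSOR(mat,b,1.0,SOR_LOCAL_SYMMETRIC_SWEEP | SOR_ZERO_INITIAL_GUESS,0.0,1,1,x);
.ve
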
3869 Most users should employ the simplified KSP interface for linear solvers
3870 instead of working directly with matrix algebra routines such as this.
3871 See, e.g., KSPCreate().
3872
3873 Vectors x and b CANNOT be the same
3874
3875 Developer Note: We should add block SOR support for AIJ matrices with block size set to greater than one and no inodes
3876
3877 Level: developer
3878
3879@*/
3880PetscErrorCode MatSOR(Mat mat,Vec b,PetscReal omega,MatSORType flag,PetscReal shift,PetscInt its,PetscInt lits,Vec x)
3881{
3882 PetscErrorCode ierr;
3883
3884 PetscFunctionBegin;
3885 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3886 PetscValidType(mat,1);
3887 PetscValidHeaderSpecific(b,VEC_CLASSID,2);
3888 PetscValidHeaderSpecific(x,VEC_CLASSID,8);
3889 PetscCheckSameComm(mat,1,b,2);
3890 PetscCheckSameComm(mat,1,x,8);
3891 if (!mat->ops->sor) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
3892 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
3893 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3894 if (mat->cmap->N != x->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %D %D",mat->cmap->N,x->map->N);
3895 if (mat->rmap->N != b->map->N) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %D %D",mat->rmap->N,b->map->N);
3896 if (mat->rmap->n != b->map->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: local dim %D %D",mat->rmap->n,b->map->n);
3897 if (its <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %D positive",its);
3898 if (lits <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires local its %D positive",lits);
3899 if (b == x) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_IDN,"b and x vector cannot be the same");
3900
3901 MatCheckPreallocated(mat,1);
3902 ierr = PetscLogEventBegin(MAT_SOR,mat,b,x,0);CHKERRQ(ierr);
3903 ierr = (*mat->ops->sor)(mat,b,omega,flag,shift,its,lits,x);CHKERRQ(ierr);
3904 ierr = PetscLogEventEnd(MAT_SOR,mat,b,x,0);CHKERRQ(ierr);
3905 ierr = PetscObjectStateIncrease((PetscObject)x);CHKERRQ(ierr);
3906 PetscFunctionReturn(0);
3907}
3908
3909/*
3910 Default matrix copy routine.
3911*/
3912PetscErrorCode MatCopy_Basic(Mat A,Mat B,MatStructure str)
3913{
3914 PetscErrorCode ierr;
3915 PetscInt i,rstart = 0,rend = 0,nz;
3916 const PetscInt *cwork;
3917 const PetscScalar *vwork;
3918
3919 PetscFunctionBegin;
3920 if (B->assembled) {
3921 ierr = MatZeroEntries(B);CHKERRQ(ierr);
3922 }
3923 if (str == SAME_NONZERO_PATTERN) {
3924 ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
3925 for (i=rstart; i<rend; i++) {
3926 ierr = MatGetRow(A,i,&nz,&cwork,&vwork);CHKERRQ(ierr);
3927 ierr = MatSetValues(B,1,&i,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
3928 ierr = MatRestoreRow(A,i,&nz,&cwork,&vwork);CHKERRQ(ierr);
3929 }
3930 } else {
3931 ierr = MatAYPX(B,0.0,A,str);CHKERRQ(ierr);
3932 }
3933 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3934 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
3935 PetscFunctionReturn(0);
3936}
3937
3938/*@
3939 MatCopy - Copies a matrix to another matrix.
3940
3941 Collective on Mat
3942
3943 Input Parameters:
3944+ A - the matrix
3945- str - SAME_NONZERO_PATTERN or DIFFERENT_NONZERO_PATTERN
3946
3947 Output Parameter:
3948. B - where the copy is put
3949
3950 Notes:
3951 If you use SAME_NONZERO_PATTERN, then the two matrices must have the same
3952 nonzero pattern; otherwise the routine will crash.
3953
3954 MatCopy() copies the matrix entries of a matrix to another existing
3955 matrix (after first zeroing the second matrix). A related routine is
3956 MatConvert(), which first creates a new matrix and then copies the data.
3957
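 Example of Usage:
 A minimal sketch (assumption: B is created here with MatDuplicate(), which is one
 way to guarantee that SAME_NONZERO_PATTERN holds):
.vb
 MatDuplicate(A,MAT_DO_NOT_COPY_VALUES,&B);
 MatCopy(A,B,SAME_NONZERO_PATTERN);
.ve
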
3958 Level: intermediate
3959
3960.seealso: MatConvert(), MatDuplicate()
3961
3962@*/
3963PetscErrorCode MatCopy(Mat A,Mat B,MatStructure str)
3964{
3965 PetscErrorCode ierr;
3966 PetscInt i;
3967
3968 PetscFunctionBegin;
3969 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
3970 PetscValidHeaderSpecific(B,MAT_CLASSID,2);
3971 PetscValidType(A,1);
3972 PetscValidType(B,2);
3973 PetscCheckSameComm(A,1,B,2);
3974 MatCheckPreallocated(B,2);
3975 if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
3976 if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3977 if (A->rmap->N != B->rmap->N || A->cmap->N != B->cmap->N) SETERRQ4(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat B: global dim (%D,%D) (%D,%D)",A->rmap->N,B->rmap->N,A->cmap->N,B->cmap->N);
3978 MatCheckPreallocated(A,1);
3979 if (A == B) PetscFunctionReturn(0);
3980
3981 ierr = PetscLogEventBegin(MAT_Copy,A,B,0,0);CHKERRQ(ierr);
3982 if (A->ops->copy) {
3983 ierr = (*A->ops->copy)(A,B,str);CHKERRQ(ierr);
3984 } else { /* generic conversion */
3985 ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
3986 }
3987
3988 B->stencil.dim = A->stencil.dim;
3989 B->stencil.noc = A->stencil.noc;
3990 for (i=0; i<=A->stencil.dim; i++) {
3991 B->stencil.dims[i] = A->stencil.dims[i];
3992 B->stencil.starts[i] = A->stencil.starts[i];
3993 }
3994
3995 ierr = PetscLogEventEnd(MAT_Copy,A,B,0,0);CHKERRQ(ierr);
3996 ierr = PetscObjectStateIncrease((PetscObject)B);CHKERRQ(ierr);
3997 PetscFunctionReturn(0);
3998}
3999
4000/*@C
4001 MatConvert - Converts a matrix to another matrix, either of the same
4002 or different type.
4003
4004 Collective on Mat
4005
4006 Input Parameters:
4007+ mat - the matrix
4008. newtype - new matrix type. Use MATSAME to create a new matrix of the
4009 same type as the original matrix.
4010- reuse - denotes if the destination matrix is to be created or reused.
4011 Use MAT_INPLACE_MATRIX for in-place conversion (that is, when you want the input mat to be changed to contain the matrix in the new format); otherwise use
4012 MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX (the latter can only be used after a first call with MAT_INITIAL_MATRIX, and causes the matrix space in M to be reused).
4013
4014 Output Parameter:
4015. M - pointer to place new matrix
4016
4017 Notes:
4018 MatConvert() first creates a new matrix and then copies the data from
4019 the first matrix. A related routine is MatCopy(), which copies the matrix
4020 entries of one matrix to another already existing matrix context.
4021
4022 Cannot be used to convert a sequential matrix to a parallel one or vice versa;
4023 the MPI communicator of the generated matrix is always the same as the communicator
4024 of the input matrix.
4025
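 Example of Usage:
 A hedged sketch (MATDENSE is just one possible target type); the second call
 reuses the matrix created by the first:
.vb
 MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&B);
 /* ... change entries of A ... */
 MatConvert(A,MATDENSE,MAT_REUSE_MATRIX,&B);
.ve
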
4026 Level: intermediate
4027
4028.seealso: MatCopy(), MatDuplicate()
4029@*/
4030PetscErrorCode MatConvert(Mat mat, MatType newtype,MatReuse reuse,Mat *M)
4031{
4032 PetscErrorCode ierr;
4033 PetscBool sametype,issame,flg;
4034 char convname[256],mtype[256];
4035 Mat B;
4036
4037 PetscFunctionBegin;
4038 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4039 PetscValidType(mat,1);
4040 PetscValidPointer(M,3);
4041 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4042 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
4043 MatCheckPreallocated(mat,1);
4044
4045 ierr = PetscOptionsGetString(((PetscObject)mat)->options,((PetscObject)mat)->prefix,"-matconvert_type",mtype,256,&flg);CHKERRQ(ierr);
4046 if (flg) {
4047 newtype = mtype;
4048 }
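 /* Illustrative note (not in the original source): the option above lets a user
 override newtype at run time, e.g. -matconvert_type seqdense on the command
 line; the value shown is an assumption, any registered MatType works. */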
4049 ierr = PetscObjectTypeCompare((PetscObject)mat,newtype,&sametype);CHKERRQ(ierr);
4050 ierr = PetscStrcmp(newtype,"same",&issame);CHKERRQ(ierr);
4051 if ((reuse == MAT_INPLACE_MATRIX) && (mat != *M)) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"MAT_INPLACE_MATRIX requires same input and output matrix");
4052 if ((reuse == MAT_REUSE_MATRIX) && (mat == *M)) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4053
4054 if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) PetscFunctionReturn(0);
4055
4056 if ((sametype || issame) && (reuse==MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4057 ierr = (*mat->ops->duplicate)(mat,MAT_COPY_VALUES,M);CHKERRQ(ierr);
4058 } else {
4059 PetscErrorCode (*conv)(Mat, MatType,MatReuse,Mat*)=NULL;
4060 const char *prefix[3] = {"seq","mpi",""};
4061 PetscInt i;
4062 /*
4063 Order of precedence:
4064 0) See if newtype is a superclass of the current matrix.
4065 1) See if a specialized converter is known to the current matrix.
4066 2) See if a specialized converter is known to the desired matrix class.
4067 3) See if a good general converter is registered for the desired class
4068 (as of 6/27/03 only MATMPIADJ falls into this category).
4069 4) See if a good general converter is known for the current matrix.
4070 5) Use a really basic converter.
4071 */
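 /* Illustrative example (not in the original source): at step 1 below, for a mat of
 type "seqaij", prefix "seq", and newtype "dense", the composed lookup name would
 be "MatConvert_seqaij_seqdense_C". */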
4072
4073 /* 0) See if newtype is a superclass of the current matrix.
4074 e.g., mat is mpiaij and newtype is aij */
4075 for (i=0; i<2; i++) {
4076 ierr = PetscStrncpy(convname,prefix[i],sizeof(convname));CHKERRQ(ierr);
4077 ierr = PetscStrlcat(convname,newtype,sizeof(convname));CHKERRQ(ierr);
4078 ierr = PetscStrcmp(convname,((PetscObject)mat)->type_name,&flg);CHKERRQ(ierr);
4079 ierr = PetscInfo3(mat,"Check superclass %s %s -> %d\n",convname,((PetscObject)mat)->type_name,flg);CHKERRQ(ierr);
4080 if (flg) {
4081 if (reuse == MAT_INPLACE_MATRIX) {
4082 PetscFunctionReturn(0);
4083 } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4084 ierr = (*mat->ops->duplicate)(mat,MAT_COPY_VALUES,M);CHKERRQ(ierr);
4085 PetscFunctionReturn(0);
4086 } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4087 ierr = MatCopy(mat,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
4088 PetscFunctionReturn(0);
4089 }
4090 }
4091 }
4092 /* 1) See if a specialized converter is known to the current matrix and the desired class */
4093 for (i=0; i<3; i++) {
4094 ierr = PetscStrncpy(convname,"MatConvert_",sizeof(convname));CHKERRQ(ierr);
4095 ierr = PetscStrlcat(convname,((PetscObject)mat)->type_name,sizeof(convname));CHKERRQ(ierr);
4096 ierr = PetscStrlcat(convname,"_",sizeof(convname));CHKERRQ(ierr);
4097 ierr = PetscStrlcat(convname,prefix[i],sizeof(convname));CHKERRQ(ierr);
4098 ierr = PetscStrlcat(convname,issame ? ((PetscObject)mat)->type_name : newtype,sizeof(convname));CHKERRQ(ierr);
4099 ierr = PetscStrlcat(convname,"_C",sizeof(convname));CHKERRQ(ierr);
4100 ierr = PetscObjectQueryFunction((PetscObject)mat,convname,&conv);CHKERRQ(ierr);
4101 ierr = PetscInfo3(mat,"Check specialized (1) %s (%s) -> %d\n",convname,((PetscObject)mat)->type_name,!!conv);CHKERRQ(ierr);
4102 if (conv) goto foundconv;
4103 }
4104
4105 /* 2) See if a specialized converter is known to the desired matrix class. */
4106 ierr = MatCreate(PetscObjectComm((PetscObject)mat),&B);CHKERRQ(ierr);
4107 ierr = MatSetSizes(B,mat->rmap->n,mat->cmap->n,mat->rmap->N,mat->cmap->N);CHKERRQ(ierr);
4108 ierr = MatSetType(B,newtype);CHKERRQ(ierr);
4109 for (i=0; i<3; i++) {
4110 ierr = PetscStrncpy(convname,"MatConvert_",sizeof(convname));CHKERRQ(ierr);
4111 ierr = PetscStrlcat(convname,((PetscObject)mat)->type_name,sizeof(convname));CHKERRQ(ierr);
4112 ierr = PetscStrlcat(convname,"_",sizeof(convname));CHKERRQ(ierr);
4113 ierr = PetscStrlcat(convname,prefix[i],sizeof(convname));CHKERRQ(ierr);
4114 ierr = PetscStrlcat(convname,newtype,sizeof(convname));CHKERRQ(ierr);
4115 ierr = PetscStrlcat(convname,"_C",sizeof(convname));CHKERRQ(ierr);
4116 ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv)PetscObjectQueryFunction_Private(((PetscObject)B),(convname),
(PetscVoidFunction*)(&conv))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4116,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4117 ierr = PetscInfo3(mat,"Check specialized (2) %s (%s) -> %d\n",convname,((PetscObject)B)->type_name,!!conv)PetscInfo_Private(__func__,mat,"Check specialized (2) %s (%s) -> %d\n"
,convname,((PetscObject)B)->type_name,!!conv)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4117,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4118 if (conv) {
4119 ierr = MatDestroy(&B);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4119,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4120 goto foundconv;
4121 }
4122 }
4123
4124 /* 3) See if a good general converter is registered for the desired class */
4125 conv = B->ops->convertfrom;
4126 ierr = PetscInfo2(mat,"Check convertfrom (%s) -> %d\n",((PetscObject)B)->type_name,!!conv)PetscInfo_Private(__func__,mat,"Check convertfrom (%s) -> %d\n"
,((PetscObject)B)->type_name,!!conv)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4126,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4127 ierr = MatDestroy(&B);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4127,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4128 if (conv) goto foundconv;
4129
4130 /* 4) See if a good general converter is known for the current matrix */
4131 if (mat->ops->convert) {
4132 conv = mat->ops->convert;
4133 }
4134 ierr = PetscInfo2(mat,"Check general convert (%s) -> %d\n",((PetscObject)mat)->type_name,!!conv)PetscInfo_Private(__func__,mat,"Check general convert (%s) -> %d\n"
,((PetscObject)mat)->type_name,!!conv)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4134,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4135 if (conv) goto foundconv;
4136
4137 /* 5) Use a really basic converter. */
4138 ierr = PetscInfo(mat,"Using MatConvert_Basic\n")PetscInfo_Private(__func__,mat,"Using MatConvert_Basic\n");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4138,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4139 conv = MatConvert_Basic;
4140
4141foundconv:
4142 ierr = PetscLogEventBegin(MAT_Convert,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Convert].active) ? (*PetscLogPLB)((MAT_Convert),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4142,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4143 ierr = (*conv)(mat,newtype,reuse,M);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4143,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4144 if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4145 /* the block sizes must be same if the mappings are copied over */
4146 (*M)->rmap->bs = mat->rmap->bs;
4147 (*M)->cmap->bs = mat->cmap->bs;
4148 ierr = PetscObjectReference((PetscObject)mat->rmap->mapping);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4148,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4149 ierr = PetscObjectReference((PetscObject)mat->cmap->mapping);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4149,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4150 (*M)->rmap->mapping = mat->rmap->mapping;
4151 (*M)->cmap->mapping = mat->cmap->mapping;
4152 }
4153 (*M)->stencil.dim = mat->stencil.dim;
4154 (*M)->stencil.noc = mat->stencil.noc;
4155 for (i=0; i<=mat->stencil.dim; i++) {
4156 (*M)->stencil.dims[i] = mat->stencil.dims[i];
4157 (*M)->stencil.starts[i] = mat->stencil.starts[i];
4158 }
4159 ierr = PetscLogEventEnd(MAT_Convert,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Convert].active) ? (*PetscLogPLE)((MAT_Convert),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4159,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4160 }
4161 ierr = PetscObjectStateIncrease((PetscObject)*M)(((PetscObject)*M)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4161,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4162
4163 /* Copy Mat options */
4164 if (mat->symmetric) {ierr = MatSetOption(*M,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4164,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
4165 if (mat->hermitian) {ierr = MatSetOption(*M,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4165,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
4166 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4167}
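/*
   Example (a minimal sketch, not part of the original source; error checking omitted):
   the converter search implemented above (steps 1-5) is what runs when a user calls
.vb
   Mat B;
   MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&B);  /* A is an assumed assembled Mat; B gets a new dense copy */
.ve
   With MAT_INPLACE_MATRIX the conversion replaces A itself.
*/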
4168
4169/*@C
4170 MatFactorGetSolverType - Returns the name of the package providing the factorization routines
4171
4172 Not Collective
4173
4174 Input Parameter:
4175. mat - the matrix, must be a factored matrix
4176
4177 Output Parameter:
4178. type - the string name of the package (do not free this string)
4179
4180 Notes:
4181 In Fortran you pass in an empty string and the package name will be copied into it.
4182 (Make sure the string is long enough.)
4183
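   Example of usage (a minimal sketch, not part of the original source; assumes F is a factored Mat obtained from MatGetFactor(); error checking omitted):
.vb
   MatSolverType stype;
   MatFactorGetSolverType(F,&stype);
   PetscPrintf(PETSC_COMM_WORLD,"factorization provided by %s\n",stype);
.ve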
4184 Level: intermediate
4185
4186.seealso: MatCopy(), MatDuplicate(), MatGetFactorAvailable(), MatGetFactor()
4187@*/
4188PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4189{
4190 PetscErrorCode ierr, (*conv)(Mat,MatSolverType*);
4191
4192 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4192; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4193 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),4193,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),4193,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),4193,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4193,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4194 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),4194,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
4195 if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),4195,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Only for factored matrix")
;
4196 ierr = PetscObjectQueryFunction((PetscObject)mat,"MatFactorGetSolverType_C",&conv)PetscObjectQueryFunction_Private(((PetscObject)mat),("MatFactorGetSolverType_C"
),(PetscVoidFunction*)(&conv))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4196,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4197 if (!conv) {
4198 *type = MATSOLVERPETSC"petsc";
4199 } else {
4200 ierr = (*conv)(mat,type);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4200,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4201 }
4202 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4203}
4204
4205typedef struct _MatSolverTypeForSpecifcType* MatSolverTypeForSpecifcType;
4206struct _MatSolverTypeForSpecifcType {
4207 MatType mtype;
4208 PetscErrorCode (*getfactor[4])(Mat,MatFactorType,Mat*);
4209 MatSolverTypeForSpecifcType next;
4210};
4211
4212typedef struct _MatSolverTypeHolder* MatSolverTypeHolder;
4213struct _MatSolverTypeHolder {
4214 char *name;
4215 MatSolverTypeForSpecifcType handlers;
4216 MatSolverTypeHolder next;
4217};
4218
4219static MatSolverTypeHolder MatSolverTypeHolders = NULL((void*)0);
4220
4221/*@C
4222 MatSolverTypeRegister - Registers a MatSolverType that works for a particular matrix type
4223
4224 Input Parameters:
4225+ package - name of the package, for example petsc or superlu
4226. mtype - the matrix type that works with this package
4227. ftype - the type of factorization supported by the package
4228- getfactor - routine that will create the factored matrix ready to be used
4229
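   Example of usage (a minimal sketch, not part of the original source; MatGetFactor_MyPkg_LU is a hypothetical routine supplied by the caller; error checking omitted):
.vb
   extern PetscErrorCode MatGetFactor_MyPkg_LU(Mat,MatFactorType,Mat*);
   MatSolverTypeRegister("mypkg",MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_MyPkg_LU);
.ve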
4230 Level: intermediate
4231
4232.seealso: MatCopy(), MatDuplicate(), MatGetFactorAvailable()
4233@*/
4234PetscErrorCode MatSolverTypeRegister(MatSolverType package,MatType mtype,MatFactorType ftype,PetscErrorCode (*getfactor)(Mat,MatFactorType,Mat*))
4235{
4236 PetscErrorCode ierr;
4237 MatSolverTypeHolder next = MatSolverTypeHolders,prev = NULL((void*)0);
4238 PetscBool flg;
4239 MatSolverTypeForSpecifcType inext,iprev = NULL((void*)0);
4240
4241 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4241; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4242 ierr = MatInitializePackage();CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4242,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4243 if (!next) {
4244 ierr = PetscNew(&MatSolverTypeHolders)PetscMallocA(1,PETSC_TRUE,4244,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,(size_t)(1)*sizeof(**((&MatSolverTypeHolders))),((&MatSolverTypeHolders
)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4244,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4245 ierr = PetscStrallocpy(package,&MatSolverTypeHolders->name);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4245,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4246 ierr = PetscNew(&MatSolverTypeHolders->handlers)PetscMallocA(1,PETSC_TRUE,4246,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,(size_t)(1)*sizeof(**((&MatSolverTypeHolders->handlers
))),((&MatSolverTypeHolders->handlers)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4246,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4247 ierr = PetscStrallocpy(mtype,(char **)&MatSolverTypeHolders->handlers->mtype);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4247,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4248 MatSolverTypeHolders->handlers->getfactor[(int)ftype-1] = getfactor;
4249 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4250 }
4251 while (next) {
4252 ierr = PetscStrcasecmp(package,next->name,&flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4252,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4253 if (flg) {
4254 if (!next->handlers) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MatSolverTypeHolder is missing handlers")return PetscError(((MPI_Comm)0x44000001),4254,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,77,PETSC_ERROR_INITIAL,"MatSolverTypeHolder is missing handlers"
)
;
4255 inext = next->handlers;
4256 while (inext) {
4257 ierr = PetscStrcasecmp(mtype,inext->mtype,&flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4257,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4258 if (flg) {
4259 inext->getfactor[(int)ftype-1] = getfactor;
4260 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4261 }
4262 iprev = inext;
4263 inext = inext->next;
4264 }
4265 ierr = PetscNew(&iprev->next)PetscMallocA(1,PETSC_TRUE,4265,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,(size_t)(1)*sizeof(**((&iprev->next))),((&iprev->
next)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4265,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4266 ierr = PetscStrallocpy(mtype,(char **)&iprev->next->mtype);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4266,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4267 iprev->next->getfactor[(int)ftype-1] = getfactor;
4268 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4269 }
4270 prev = next;
4271 next = next->next;
4272 }
4273 ierr = PetscNew(&prev->next)PetscMallocA(1,PETSC_TRUE,4273,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,(size_t)(1)*sizeof(**((&prev->next))),((&prev->
next)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4273,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4274 ierr = PetscStrallocpy(package,&prev->next->name);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4274,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4275 ierr = PetscNew(&prev->next->handlers)PetscMallocA(1,PETSC_TRUE,4275,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,(size_t)(1)*sizeof(**((&prev->next->handlers))),((
&prev->next->handlers)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4275,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4276 ierr = PetscStrallocpy(mtype,(char **)&prev->next->handlers->mtype);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4276,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4277 prev->next->handlers->getfactor[(int)ftype-1] = getfactor;
4278 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4279}
4280
4281/*@C
4282 MatSolverTypeGet - Gets the function that creates the factor matrix, if it exists
4283
4284 Input Parameters:
4285+ package - name of the package, for example petsc or superlu
4286. ftype - the type of factorization supported by the package
4287- mtype - the matrix type that works with this package
4288
4289 Output Parameters:
4290+ foundpackage - PETSC_TRUE if the package was registered
4291. foundmtype - PETSC_TRUE if the package supports the requested mtype
4292- getfactor - routine that will create the factored matrix ready to be used or NULL if not found
4293
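   Example of usage (a minimal sketch, not part of the original source; queries whether superlu was registered to provide LU for MATSEQAIJ; error checking omitted):
.vb
   PetscBool foundpackage,foundmtype;
   PetscErrorCode (*getfactor)(Mat,MatFactorType,Mat*);
   MatSolverTypeGet("superlu",MATSEQAIJ,MAT_FACTOR_LU,&foundpackage,&foundmtype,&getfactor);
.ve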
4294 Level: intermediate
4295
4296.seealso: MatCopy(), MatDuplicate(), MatGetFactorAvailable()
4297@*/
4298PetscErrorCode MatSolverTypeGet(MatSolverType package,MatType mtype,MatFactorType ftype,PetscBool *foundpackage,PetscBool *foundmtype,PetscErrorCode (**getfactor)(Mat,MatFactorType,Mat*))
4299{
4300 PetscErrorCode ierr;
4301 MatSolverTypeHolder next = MatSolverTypeHolders;
4302 PetscBool flg;
4303 MatSolverTypeForSpecifcType inext;
4304
4305 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4305; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4306 if (foundpackage) *foundpackage = PETSC_FALSE;
4307 if (foundmtype) *foundmtype = PETSC_FALSE;
4308 if (getfactor) *getfactor = NULL((void*)0);
4309
4310 if (package) {
4311 while (next) {
4312 ierr = PetscStrcasecmp(package,next->name,&flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4312,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4313 if (flg) {
4314 if (foundpackage) *foundpackage = PETSC_TRUE;
4315 inext = next->handlers;
4316 while (inext) {
4317 ierr = PetscStrbeginswith(mtype,inext->mtype,&flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4317,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4318 if (flg) {
4319 if (foundmtype) *foundmtype = PETSC_TRUE;
4320 if (getfactor) *getfactor = inext->getfactor[(int)ftype-1];
4321 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4322 }
4323 inext = inext->next;
4324 }
4325 }
4326 next = next->next;
4327 }
4328 } else {
4329 while (next) {
4330 inext = next->handlers;
4331 while (inext) {
4332 ierr = PetscStrbeginswith(mtype,inext->mtype,&flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4332,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4333 if (flg && inext->getfactor[(int)ftype-1]) {
4334 if (foundpackage) *foundpackage = PETSC_TRUE;
4335 if (foundmtype) *foundmtype = PETSC_TRUE;
4336 if (getfactor) *getfactor = inext->getfactor[(int)ftype-1];
4337 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4338 }
4339 inext = inext->next;
4340 }
4341 next = next->next;
4342 }
4343 }
4344 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4345}
4346
4347PetscErrorCode MatSolverTypeDestroy(void)
4348{
4349 PetscErrorCode ierr;
4350 MatSolverTypeHolder next = MatSolverTypeHolders,prev;
4351 MatSolverTypeForSpecifcType inext,iprev;
4352
4353 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4353; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4354 while (next) {
4355 ierr = PetscFree(next->name)((*PetscTrFree)((void*)(next->name),4355,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((next->name) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4355,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4356 inext = next->handlers;
4357 while (inext) {
4358 ierr = PetscFree(inext->mtype)((*PetscTrFree)((void*)(inext->mtype),4358,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((inext->mtype) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4358,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4359 iprev = inext;
4360 inext = inext->next;
4361 ierr = PetscFree(iprev)((*PetscTrFree)((void*)(iprev),4361,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((iprev) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4361,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4362 }
4363 prev = next;
4364 next = next->next;
4365 ierr = PetscFree(prev)((*PetscTrFree)((void*)(prev),4365,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((prev) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4365,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4366 }
4367 MatSolverTypeHolders = NULL((void*)0);
4368 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4369}
4370
4371/*@C
4372 MatGetFactor - Returns a matrix suitable for calls to MatXXFactorSymbolic()
4373
4374 Collective on Mat
4375
4376 Input Parameters:
4377+ mat - the matrix
4378. type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4379- ftype - factor type, one of MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ICC, or MAT_FACTOR_ILU
4380
4381 Output Parameter:
4382. f - the factor matrix used with MatXXFactorSymbolic() calls
4383
4384 Notes:
4385 Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4386 such as pastix, superlu, and mumps.
4387
4388 PETSc must have been configured (./configure) to use the external solver, using the option --download-<package>
4389
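   Example of usage (a minimal sketch, not part of the original source; A is an assumed assembled Mat and b, x compatible Vecs; error checking omitted):
.vb
   Mat           F;
   IS            rowperm,colperm;
   MatFactorInfo info;
   MatFactorInfoInitialize(&info);
   MatGetOrdering(A,MATORDERINGND,&rowperm,&colperm);
   MatGetFactor(A,MATSOLVERPETSC,MAT_FACTOR_LU,&F);
   MatLUFactorSymbolic(F,A,rowperm,colperm,&info);
   MatLUFactorNumeric(F,A,&info);
   MatSolve(F,b,x);
.ve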
4390 Level: intermediate
4391
4392.seealso: MatCopy(), MatDuplicate(), MatGetFactorAvailable()
4393@*/
4394PetscErrorCode MatGetFactor(Mat mat, MatSolverType type,MatFactorType ftype,Mat *f)
4395{
4396 PetscErrorCode ierr,(*conv)(Mat,MatFactorType,Mat*);
4397 PetscBool foundpackage,foundmtype;
4398
4399 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4399; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4400 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),4400,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),4400,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),4400,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4400,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4401 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),4401,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
4402
4403 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),4403,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
4404 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),4404,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
4405
4406 ierr = MatSolverTypeGet(type,((PetscObject)mat)->type_name,ftype,&foundpackage,&foundmtype,&conv);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4406,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4407 if (!foundpackage) {
4408 if (type) {
4409 SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_MISSING_FACTOR,"Could not locate solver package %s. Perhaps you must ./configure with --download-%s",type,type)return PetscError(PetscObjectComm((PetscObject)mat),4409,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",92,PETSC_ERROR_INITIAL
,"Could not locate solver package %s. Perhaps you must ./configure with --download-%s"
,type,type)
;
4410 } else {
4411 SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_MISSING_FACTOR,"Could not locate a solver package. Perhaps you must ./configure with --download-<package>")return PetscError(PetscObjectComm((PetscObject)mat),4411,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",92,PETSC_ERROR_INITIAL
,"Could not locate a solver package. Perhaps you must ./configure with --download-<package>"
)
;
4412 }
4413 }
4414
4415 if (!foundmtype) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_MISSING_FACTOR,"MatSolverType %s does not support matrix type %s",type,((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),4415,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",92,PETSC_ERROR_INITIAL
,"MatSolverType %s does not support matrix type %s",type,((PetscObject
)mat)->type_name)
;
4416 if (!conv) SETERRQ3(PetscObjectComm((PetscObject)mat),PETSC_ERR_MISSING_FACTOR,"MatSolverType %s does not support factorization type %s for matrix type %s",type,MatFactorTypes[ftype],((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),4416,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",92,PETSC_ERROR_INITIAL
,"MatSolverType %s does not support factorization type %s for matrix type %s"
,type,MatFactorTypes[ftype],((PetscObject)mat)->type_name)
;
4417
4418#if defined(PETSC_USE_COMPLEX)
4419 if (mat->hermitian && !mat->symmetric && (ftype == MAT_FACTOR_CHOLESKY||ftype == MAT_FACTOR_ICC)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Hermitian CHOLESKY or ICC Factor is not supported")return PetscError(((MPI_Comm)0x44000001),4419,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,56,PETSC_ERROR_INITIAL,"Hermitian CHOLESKY or ICC Factor is not supported"
)
;
4420#endif
4421
4422 ierr = (*conv)(mat,ftype,f);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4422,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4423 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4424}
4425
4426/*@C
4427 MatGetFactorAvailable - Returns a flag indicating whether the matrix supports a particular solver package and factor type
4428
4429 Not Collective
4430
4431 Input Parameters:
4432+ mat - the matrix
4433. type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4434- ftype - factor type, one of MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ICC, or MAT_FACTOR_ILU
4435
4436 Output Parameter:
4437. flg - PETSC_TRUE if the factorization is available
4438
4439 Notes:
4440 Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4441 such as pastix, superlu, and mumps.
4442
4443 PETSc must have been configured (./configure) to use the external solver, using the option --download-<package>
4444
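   Example of usage (a minimal sketch, not part of the original source; falls back to PETSc's builtin LU if mumps is not available; A and F as in the MatGetFactor() example above):
.vb
   PetscBool flg;
   MatGetFactorAvailable(A,MATSOLVERMUMPS,MAT_FACTOR_LU,&flg);
   MatGetFactor(A,flg ? MATSOLVERMUMPS : MATSOLVERPETSC,MAT_FACTOR_LU,&F);
.ve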
4445 Level: intermediate
4446
4447.seealso: MatCopy(), MatDuplicate(), MatGetFactor()
4448@*/
4449PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type,MatFactorType ftype,PetscBool *flg)
4450{
4451 PetscErrorCode ierr, (*gconv)(Mat,MatFactorType,Mat*);
4452
4453 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4453; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4454 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),4454,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),4454,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),4454,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4454,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4455 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),4455,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
4456
4457 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),4457,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
4458 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),4458,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
4459
4460 *flg = PETSC_FALSE;
4461 ierr = MatSolverTypeGet(type,((PetscObject)mat)->type_name,ftype,NULL((void*)0),NULL((void*)0),&gconv);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4461,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4462 if (gconv) {
4463 *flg = PETSC_TRUE;
4464 }
4465 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4466}
4467
4468#include <petscdmtypes.h>
4469
4470/*@
4471 MatDuplicate - Duplicates a matrix including the non-zero structure.
4472
4473 Collective on Mat
4474
4475 Input Parameters:
4476+ mat - the matrix
4477- op - One of MAT_DO_NOT_COPY_VALUES, MAT_COPY_VALUES, or MAT_SHARE_NONZERO_PATTERN.
4478 See the manual page for MatDuplicateOption for an explanation of these options.
4479
4480 Output Parameter:
4481. M - pointer to place new matrix
4482
4483 Level: intermediate
4484
4485 Notes:
4486 You cannot change the nonzero pattern for the parent or child matrix if you use MAT_SHARE_NONZERO_PATTERN.
4487 When the original mat is the product of a matrix operation, e.g., an output of MatMatMult() or MatCreateSubMatrix(), only the simple matrix data structure of mat is duplicated; the internal data structures created for the reuse of previous matrix operations are not duplicated. Do not use MatDuplicate() to create a new matrix M if M is intended to be reused as the product of a matrix operation.
4488
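   Example of usage (a minimal sketch, not part of the original source; copies both the nonzero structure and the values of an assumed assembled Mat A):
.vb
   Mat B;
   MatDuplicate(A,MAT_COPY_VALUES,&B);
.ve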
4489.seealso: MatCopy(), MatConvert(), MatDuplicateOption
4490@*/
4491PetscErrorCode MatDuplicate(Mat mat,MatDuplicateOption op,Mat *M)
4492{
4493 PetscErrorCode ierr;
4494 Mat B;
4495 PetscInt i;
4496 DM dm;
4497 void (*viewf)(void);
4498
4499 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4499; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4500 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),4500,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),4500,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),4500,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4500,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4501 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),4501,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
4502 PetscValidPointer(M,3)do { if (!M) return PetscError(((MPI_Comm)0x44000001),4502,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",3); if (!PetscCheckPointer(M,
PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),4502,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",3); } while (0)
;
4503 if (op == MAT_COPY_VALUES && !mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"MAT_COPY_VALUES not allowed for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),4503,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"MAT_COPY_VALUES not allowed for unassembled matrix")
;
4504 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),4504,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
4505 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),4505,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
4506
4507 *M = 0;
4508 if (!mat->ops->duplicate) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Not written for this matrix type")return PetscError(PetscObjectComm((PetscObject)mat),4508,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Not written for this matrix type")
;
4509 ierr = PetscLogEventBegin(MAT_Convert,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Convert].active) ? (*PetscLogPLB)((MAT_Convert),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4509,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4510 ierr = (*mat->ops->duplicate)(mat,op,M);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4510,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4511 B = *M;
4512
4513 ierr = MatGetOperation(mat,MATOP_VIEW,&viewf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4513,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4514 if (viewf) {
4515 ierr = MatSetOperation(B,MATOP_VIEW,viewf);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4515,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4516 }
4517
4518 B->stencil.dim = mat->stencil.dim;
4519 B->stencil.noc = mat->stencil.noc;
4520 for (i=0; i<=mat->stencil.dim; i++) {
4521 B->stencil.dims[i] = mat->stencil.dims[i];
4522 B->stencil.starts[i] = mat->stencil.starts[i];
4523 }
4524
4525 B->nooffproczerorows = mat->nooffproczerorows;
4526 B->nooffprocentries = mat->nooffprocentries;
4527
4528 ierr = PetscObjectQuery((PetscObject) mat, "__PETSc_dm", (PetscObject*) &dm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4528,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4529 if (dm) {
4530 ierr = PetscObjectCompose((PetscObject) B, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4530,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4531 }
4532 ierr = PetscLogEventEnd(MAT_Convert,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Convert].active) ? (*PetscLogPLE)((MAT_Convert),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4532,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4533 ierr = PetscObjectStateIncrease((PetscObject)B)(((PetscObject)B)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4533,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4534 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4535}
4536
4537/*@
4538 MatGetDiagonal - Gets the diagonal of a matrix.
4539
4540 Logically Collective on Mat
4541
4542 Input Parameters:
4543+ mat - the matrix
4544- v - the vector for storing the diagonal
4545
4546 Output Parameter:
4547. v - the diagonal of the matrix
4548
4549 Level: intermediate
4550
4551 Note:
4552 Currently only correct in parallel for square matrices.
4553
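   Example of usage (a minimal sketch, not part of the original source; MatCreateVecs() yields a Vec with the same row layout as the assumed Mat A; error checking omitted):
.vb
   Vec d;
   MatCreateVecs(A,NULL,&d);
   MatGetDiagonal(A,d);
.ve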
4554.seealso: MatGetRow(), MatCreateSubMatrices(), MatCreateSubMatrix(), MatGetRowMaxAbs()
4555@*/
4556PetscErrorCode MatGetDiagonal(Mat mat,Vec v)
4557{
4558 PetscErrorCode ierr;
4559
4560 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4560; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4561 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),4561,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),4561,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),4561,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4561,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4562 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),4562,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
4563 PetscValidHeaderSpecific(v,VEC_CLASSID,2)do { if (!v) return PetscError(((MPI_Comm)0x44000001),4563,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(v,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),4563,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(v))->classid != VEC_CLASSID) { if (
((PetscObject)(v))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),4563,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),4563,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
4564 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),4564,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
4565 if (!mat->ops->getdiagonal) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),4565,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
4566 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),4566,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
4567
4568 ierr = (*mat->ops->getdiagonal)(mat,v);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4568,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4569 ierr = PetscObjectStateIncrease((PetscObject)v)(((PetscObject)v)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4569,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4570 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4571}
4572
4573/*@C
4574 MatGetRowMin - Gets the minimum value (of the real part) of each
4575 row of the matrix
4576
4577 Logically Collective on Mat
4578
4579 Input Parameter:
4580. mat - the matrix
4581
4582 Output Parameters:
4583+ v - the vector for storing the minimums
4584- idx - the indices of the column found for each row (optional)
4585
4586 Level: intermediate
4587
4588 Notes:
4589 The results of this call are the same as if one converted the matrix to dense format
4590 and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
4591
4592 This code is only implemented for a couple of matrix formats.
4593
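   Example of usage (a minimal sketch, not part of the original source; pass NULL for idx when the column indices are not needed; A is an assumed assembled Mat):
.vb
   Vec rmin;
   MatCreateVecs(A,NULL,&rmin);
   MatGetRowMin(A,rmin,NULL);
.ve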
4594.seealso: MatGetDiagonal(), MatCreateSubMatrices(), MatCreateSubMatrix(), MatGetRowMaxAbs(),
4595 MatGetRowMax()
4596@*/
4597PetscErrorCode MatGetRowMin(Mat mat,Vec v,PetscInt idx[])
4598{
4599 PetscErrorCode ierr;
4600
4601 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4601; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4602 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),4602,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),4602,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),4602,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4602,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4603 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),4603,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
4604 PetscValidHeaderSpecific(v,VEC_CLASSID,2)do { if (!v) return PetscError(((MPI_Comm)0x44000001),4604,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(v,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),4604,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(v))->classid != VEC_CLASSID) { if (
((PetscObject)(v))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),4604,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),4604,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
4605 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),4605,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
4606 if (!mat->ops->getrowmin) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(((MPI_Comm)0x44000001),4606,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,56,PETSC_ERROR_INITIAL,"Mat type %s",((PetscObject)mat)->
type_name)
;
4607 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),4607,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
4608
4609 ierr = (*mat->ops->getrowmin)(mat,v,idx);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4609,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4610 ierr = PetscObjectStateIncrease((PetscObject)v)(((PetscObject)v)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4610,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4611 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4612}
4613
4614/*@C
4615 MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
4616 row of the matrix
4617
4618 Logically Collective on Mat
4619
4620 Input Parameter:
4621. mat - the matrix
4622
4623 Output Parameters:
4624+ v - the vector for storing the minimums
4625- idx - the indices of the column found for each row (or NULL if not needed)
4626
4627 Level: intermediate
4628
4629 Notes:
4630 If a row is completely empty or has only 0.0 values then the idx[] value for that
4631 row is 0 (the first column).
4632
4633 This code is only implemented for a couple of matrix formats.
4634
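 Example of Usage (an illustrative sketch, not part of the original source; assumes mat is assembled and idx is sized to the local number of rows):
.vb
 Vec      v;
 PetscInt m,*idx;
 MatGetLocalSize(mat,&m,NULL);
 MatCreateVecs(mat,NULL,&v);
 PetscMalloc1(m,&idx);
 MatGetRowMinAbs(mat,v,idx);
 /* ... use v and idx ... */
 PetscFree(idx);
 VecDestroy(&v);
.ve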
4635.seealso: MatGetDiagonal(), MatCreateSubMatrices(), MatCreateSubMatrix(), MatGetRowMax(), MatGetRowMaxAbs(), MatGetRowMin()
4636@*/
4637PetscErrorCode MatGetRowMinAbs(Mat mat,Vec v,PetscInt idx[])
4638{
4639 PetscErrorCode ierr;
4640
4641 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4641; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4642 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),4642,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),4642,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),4642,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4642,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4643 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),4643,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
4644 PetscValidHeaderSpecific(v,VEC_CLASSID,2)do { if (!v) return PetscError(((MPI_Comm)0x44000001),4644,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(v,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),4644,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(v))->classid != VEC_CLASSID) { if (
((PetscObject)(v))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),4644,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),4644,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
4645 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),4645,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
4646 if (!mat->ops->getrowminabs) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),4646,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
4647 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),4647,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
4648 if (idx) {ierr = PetscArrayzero(idx,mat->rmap->n)PetscMemzero(idx,(mat->rmap->n)*sizeof(*(idx)));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4648,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
4649
4650 ierr = (*mat->ops->getrowminabs)(mat,v,idx);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4650,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4651 ierr = PetscObjectStateIncrease((PetscObject)v)(((PetscObject)v)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4651,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4652 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4653}
4654
4655/*@C
4656 MatGetRowMax - Gets the maximum value (of the real part) of each
4657 row of the matrix
4658
4659 Logically Collective on Mat
4660
4661 Input Parameter:
4662. mat - the matrix
4663
4664 Output Parameters:
4665+ v - the vector for storing the maximums
4666- idx - the indices of the column found for each row (or NULL if not needed)
4667
4668 Level: intermediate
4669
4670 Notes:
4671 The results of this call are the same as if one converted the matrix to dense format
4672 and found the maximum value in each row (i.e. the implicit zeros are counted as zeros).
4673
4674 This code is only implemented for a couple of matrix formats.
4675
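 Example of Usage (an illustrative sketch, not part of the original source; idx is passed as NULL since the column indices are not needed here):
.vb
 Vec v;
 MatCreateVecs(mat,NULL,&v);
 MatGetRowMax(mat,v,NULL);
 VecView(v,PETSC_VIEWER_STDOUT_WORLD);
 VecDestroy(&v);
.ve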
4676.seealso: MatGetDiagonal(), MatCreateSubMatrices(), MatCreateSubMatrix(), MatGetRowMaxAbs(), MatGetRowMin()
4677@*/
4678PetscErrorCode MatGetRowMax(Mat mat,Vec v,PetscInt idx[])
4679{
4680 PetscErrorCode ierr;
4681
4682 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4682; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4683 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),4683,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),4683,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),4683,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4683,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4684 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),4684,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
4685 PetscValidHeaderSpecific(v,VEC_CLASSID,2)do { if (!v) return PetscError(((MPI_Comm)0x44000001),4685,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(v,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),4685,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(v))->classid != VEC_CLASSID) { if (
((PetscObject)(v))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),4685,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),4685,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
4686 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),4686,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
4687 if (!mat->ops->getrowmax) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),4687,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
4688 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),4688,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
4689
4690 ierr = (*mat->ops->getrowmax)(mat,v,idx);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4690,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4691 ierr = PetscObjectStateIncrease((PetscObject)v)(((PetscObject)v)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4691,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4692 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4693}
4694
4695/*@C
4696 MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
4697 row of the matrix
4698
4699 Logically Collective on Mat
4700
4701 Input Parameter:
4702. mat - the matrix
4703
4704 Output Parameters:
4705+ v - the vector for storing the maximums
4706- idx - the indices of the column found for each row (or NULL if not needed)
4707
4708 Level: intermediate
4709
4710 Notes:
4711 If a row is completely empty or has only 0.0 values then the idx[] value for that
4712 row is 0 (the first column).
4713
4714 This code is only implemented for a couple of matrix formats.
4715
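 Example of Usage (an illustrative sketch, not part of the original source; finds the largest absolute entry of each row and then of the whole matrix):
.vb
 Vec       rowmax;
 PetscReal amax;
 MatCreateVecs(mat,NULL,&rowmax);
 MatGetRowMaxAbs(mat,rowmax,NULL);
 VecMax(rowmax,NULL,&amax);  /* max_ij |a_ij| */
 VecDestroy(&rowmax);
.ve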
4716.seealso: MatGetDiagonal(), MatCreateSubMatrices(), MatCreateSubMatrix(), MatGetRowMax(), MatGetRowMin()
4717@*/
4718PetscErrorCode MatGetRowMaxAbs(Mat mat,Vec v,PetscInt idx[])
4719{
4720 PetscErrorCode ierr;
4721
4722 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4722; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4723 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),4723,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),4723,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),4723,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4723,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4724 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),4724,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
4725 PetscValidHeaderSpecific(v,VEC_CLASSID,2)do { if (!v) return PetscError(((MPI_Comm)0x44000001),4725,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(v,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),4725,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(v))->classid != VEC_CLASSID) { if (
((PetscObject)(v))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),4725,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),4725,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
4726 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),4726,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
4727 if (!mat->ops->getrowmaxabs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(((MPI_Comm)0x44000001),4727,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,56,PETSC_ERROR_INITIAL,"Mat type %s",((PetscObject)mat)->
type_name)
;
4728 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),4728,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
4729 if (idx) {ierr = PetscArrayzero(idx,mat->rmap->n)PetscMemzero(idx,(mat->rmap->n)*sizeof(*(idx)));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4729,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
4730
4731 ierr = (*mat->ops->getrowmaxabs)(mat,v,idx);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4731,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4732 ierr = PetscObjectStateIncrease((PetscObject)v)(((PetscObject)v)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4732,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4733 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4734}
4735
4736/*@
4737 MatGetRowSum - Gets the sum of each row of the matrix
4738
4739 Logically or Neighborhood Collective on Mat
4740
4741 Input Parameter:
4742. mat - the matrix
4743
4744 Output Parameter:
4745. v - the vector for storing the sum of rows
4746
4747 Level: intermediate
4748
4749 Notes:
4750 This code is slow since it is not currently specialized for different matrix formats.
4751
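 Example of Usage (an illustrative sketch, not part of the original source; scales each row of mat by the reciprocal of its row sum, assuming all row sums are nonzero):
.vb
 Vec rowsum;
 MatCreateVecs(mat,NULL,&rowsum);
 MatGetRowSum(mat,rowsum);
 VecReciprocal(rowsum);
 MatDiagonalScale(mat,rowsum,NULL);
 VecDestroy(&rowsum);
.ve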
4752.seealso: MatGetDiagonal(), MatCreateSubMatrices(), MatCreateSubMatrix(), MatGetRowMax(), MatGetRowMin()
4753@*/
4754PetscErrorCode MatGetRowSum(Mat mat, Vec v)
4755{
4756 Vec ones;
4757 PetscErrorCode ierr;
4758
4759 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4759; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4760 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),4760,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),4760,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),4760,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4760,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4761 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),4761,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
4762 PetscValidHeaderSpecific(v,VEC_CLASSID,2)do { if (!v) return PetscError(((MPI_Comm)0x44000001),4762,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(v,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),4762,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(v))->classid != VEC_CLASSID) { if (
((PetscObject)(v))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),4762,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),4762,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
4763 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),4763,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
4764 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),4764,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
4765 ierr = MatCreateVecs(mat,&ones,NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4765,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4766 ierr = VecSet(ones,1.);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4766,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4767 ierr = MatMult(mat,ones,v);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4767,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4768 ierr = VecDestroy(&ones);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4768,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4769 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4770}
4771
4772/*@
4773 MatTranspose - Computes an in-place or out-of-place transpose of a matrix.
4774
4775 Collective on Mat
4776
4777 Input Parameters:
4778+ mat - the matrix to transpose
4779- reuse - either MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX, or MAT_INPLACE_MATRIX
4780
4781 Output Parameter:
4782. B - the transpose
4783
4784 Notes:
4785 If you use MAT_INPLACE_MATRIX then you must pass in &mat for B.
4786
4787 MAT_REUSE_MATRIX reuses the B matrix created by a previous call to this function with MAT_INITIAL_MATRIX.
4788
4789 Consider using MatCreateTranspose() instead if you only need a matrix that behaves like the transpose but don't need the storage to be changed.
4790
4791 Level: intermediate
4792
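 Example of Usage (an illustrative sketch, not part of the original source; MAT_REUSE_MATRIX assumes the nonzero pattern of A has not changed):
.vb
 Mat B;
 MatTranspose(A,MAT_INITIAL_MATRIX,&B);  /* create B = A^T */
 /* ... change the values in A ... */
 MatTranspose(A,MAT_REUSE_MATRIX,&B);    /* refill the existing B */
 MatTranspose(A,MAT_INPLACE_MATRIX,&A);  /* transpose A in place */
 MatDestroy(&B);
.ve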
4793.seealso: MatMultTranspose(), MatMultTransposeAdd(), MatIsTranspose(), MatReuse
4794@*/
4795PetscErrorCode MatTranspose(Mat mat,MatReuse reuse,Mat *B)
4796{
4797 PetscErrorCode ierr;
4798
4799 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4799; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4800 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),4800,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),4800,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),4800,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4800,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4801 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),4801,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
4802 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),4802,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
4803 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),4803,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
4804 if (!mat->ops->transpose) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),4804,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
4805 if (reuse == MAT_INPLACE_MATRIX && mat != *B) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"MAT_INPLACE_MATRIX requires last matrix to match first")return PetscError(PetscObjectComm((PetscObject)mat),4805,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"MAT_INPLACE_MATRIX requires last matrix to match first")
;
4806 if (reuse == MAT_REUSE_MATRIX && mat == *B) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Perhaps you mean MAT_INPLACE_MATRIX")return PetscError(PetscObjectComm((PetscObject)mat),4806,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Perhaps you mean MAT_INPLACE_MATRIX")
;
4807 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),4807,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
4808
4809 ierr = PetscLogEventBegin(MAT_Transpose,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Transpose].active) ? (*PetscLogPLB)((MAT_Transpose),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4809,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4810 ierr = (*mat->ops->transpose)(mat,reuse,B);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4810,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4811 ierr = PetscLogEventEnd(MAT_Transpose,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Transpose].active) ? (*PetscLogPLE)((MAT_Transpose),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4811,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4812 if (B) {ierr = PetscObjectStateIncrease((PetscObject)*B)(((PetscObject)*B)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4812,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
4813 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4814}
4815
4816/*@
4817 MatIsTranspose - Test whether a matrix is another one's transpose,
4818 or its own, in which case it tests symmetry.
4819
4820 Collective on Mat
4821
4822 Input Parameters:
4823+ A - the matrix to test
4824. B - the matrix to test against, this can equal the first parameter
- tol - tolerance, differences between entries smaller than this are counted as zero
4825
4826 Output Parameter:
4827. flg - the result
4828
4829 Notes:
4830 Only available for SeqAIJ/MPIAIJ matrices. The sequential algorithm
4831 has a running time of the order of the number of nonzeros; the parallel
4832 test involves parallel copies of the block-offdiagonal parts of the matrix.
4833
4834 Level: intermediate
4835
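 Example of Usage (an illustrative sketch, not part of the original source; passing A twice checks symmetry to the given tolerance):
.vb
 PetscBool flg;
 MatIsTranspose(A,A,1.e-12,&flg);
 if (flg) { /* A is symmetric up to tol */ }
.ve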
4836.seealso: MatTranspose(), MatIsSymmetric(), MatIsHermitian()
4837@*/
4838PetscErrorCode MatIsTranspose(Mat A,Mat B,PetscReal tol,PetscBool *flg)
4839{
4840 PetscErrorCode ierr,(*f)(Mat,Mat,PetscReal,PetscBool*),(*g)(Mat,Mat,PetscReal,PetscBool*);
4841
4842 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4842; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4843 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),4843,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),4843,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),4843,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4843,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4844 PetscValidHeaderSpecific(B,MAT_CLASSID,2)do { if (!B) return PetscError(((MPI_Comm)0x44000001),4844,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(B,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),4844,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(B))->classid != MAT_CLASSID) { if (
((PetscObject)(B))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),4844,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),4844,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
4845 PetscValidBoolPointer(flg,3)do { if (!flg) return PetscError(((MPI_Comm)0x44000001),4845,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(flg,PETSC_BOOL)) return PetscError(((MPI_Comm
)0x44000001),4845,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscBool: Parameter # %d"
,3); } while (0)
;
4846 ierr = PetscObjectQueryFunction((PetscObject)A,"MatIsTranspose_C",&f)PetscObjectQueryFunction_Private(((PetscObject)A),("MatIsTranspose_C"
),(PetscVoidFunction*)(&f))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4846,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4847 ierr = PetscObjectQueryFunction((PetscObject)B,"MatIsTranspose_C",&g)PetscObjectQueryFunction_Private(((PetscObject)B),("MatIsTranspose_C"
),(PetscVoidFunction*)(&g))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4847,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4848 *flg = PETSC_FALSE;
4849 if (f && g) {
4850 if (f == g) {
4851 ierr = (*f)(A,B,tol,flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4851,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4852 } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_NOTSAMETYPE,"Matrices do not have the same comparator for symmetry test")return PetscError(PetscObjectComm((PetscObject)A),4852,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",69,PETSC_ERROR_INITIAL
,"Matrices do not have the same comparator for symmetry test"
)
;
4853 } else {
4854 MatType mattype;
4855 if (!f) {
4856 ierr = MatGetType(A,&mattype);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4856,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4857 } else {
4858 ierr = MatGetType(B,&mattype);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4858,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4859 }
4860 SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix of type <%s> does not support checking for transpose",mattype)return PetscError(((MPI_Comm)0x44000001),4860,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,56,PETSC_ERROR_INITIAL,"Matrix of type <%s> does not support checking for transpose"
,mattype)
;
4861 }
4862 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4863}
4864
4865/*@
4866 MatHermitianTranspose - Computes the Hermitian (complex conjugate) transpose of a matrix, in-place or out-of-place.
4867
4868 Collective on Mat
4869
4870 Input Parameters:
4871+ mat - the matrix to transpose and complex conjugate
4872- reuse - MAT_INITIAL_MATRIX to create a new matrix, MAT_INPLACE_MATRIX to reuse the first argument to store the Hermitian transpose
4873
4874 Output Parameter:
4875. B - the Hermitian transpose
4876
4877 Level: intermediate
4878
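 Example of Usage (an illustrative sketch, not part of the original source):
.vb
 Mat B;
 MatHermitianTranspose(A,MAT_INITIAL_MATRIX,&B);  /* B = conj(A)^T */
 MatDestroy(&B);
.ve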
4879.seealso: MatTranspose(), MatMultTranspose(), MatMultTransposeAdd(), MatIsTranspose(), MatReuse
4880@*/
4881PetscErrorCode MatHermitianTranspose(Mat mat,MatReuse reuse,Mat *B)
4882{
4883 PetscErrorCode ierr;
4884
4885 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4885; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4886 ierr = MatTranspose(mat,reuse,B);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4886,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4887#if defined(PETSC_USE_COMPLEX)
4888 ierr = MatConjugate(*B);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4888,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4889#endif
4890 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4891}
4892
4893/*@
4894 MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose, or its own, in which case it tests whether the matrix is Hermitian.
4895
4896 Collective on Mat
4897
4898 Input Parameters:
4899+ A - the matrix to test
4900. B - the matrix to test against, this can equal the first parameter
- tol - tolerance, differences between entries smaller than this are counted as zero
4901
4902 Output Parameter:
4903. flg - the result
4904
4905 Notes:
4906 Only available for SeqAIJ/MPIAIJ matrices. The sequential algorithm
4907 has a running time of the order of the number of nonzeros; the parallel
4908 test involves parallel copies of the block-offdiagonal parts of the matrix.
4909
4910 Level: intermediate
4911
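 Example of Usage (an illustrative sketch, not part of the original source; B is built explicitly so the test is expected to return PETSC_TRUE):
.vb
 Mat       B;
 PetscBool flg;
 MatHermitianTranspose(A,MAT_INITIAL_MATRIX,&B);
 MatIsHermitianTranspose(A,B,1.e-12,&flg);
 MatDestroy(&B);
.ve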
4912.seealso: MatTranspose(), MatIsSymmetric(), MatIsHermitian(), MatIsTranspose()
4913@*/
4914PetscErrorCode MatIsHermitianTranspose(Mat A,Mat B,PetscReal tol,PetscBool *flg)
4915{
4916 PetscErrorCode ierr,(*f)(Mat,Mat,PetscReal,PetscBool*),(*g)(Mat,Mat,PetscReal,PetscBool*);
4917
4918 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4918; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4919 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),4919,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),4919,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),4919,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4919,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4920 PetscValidHeaderSpecific(B,MAT_CLASSID,2)do { if (!B) return PetscError(((MPI_Comm)0x44000001),4920,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(B,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),4920,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(B))->classid != MAT_CLASSID) { if (
((PetscObject)(B))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),4920,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),4920,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
4921 PetscValidBoolPointer(flg,3)do { if (!flg) return PetscError(((MPI_Comm)0x44000001),4921,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(flg,PETSC_BOOL)) return PetscError(((MPI_Comm
)0x44000001),4921,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscBool: Parameter # %d"
,3); } while (0)
;
4922 ierr = PetscObjectQueryFunction((PetscObject)A,"MatIsHermitianTranspose_C",&f)PetscObjectQueryFunction_Private(((PetscObject)A),("MatIsHermitianTranspose_C"
),(PetscVoidFunction*)(&f))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4922,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4923 ierr = PetscObjectQueryFunction((PetscObject)B,"MatIsHermitianTranspose_C",&g)PetscObjectQueryFunction_Private(((PetscObject)B),("MatIsHermitianTranspose_C"
),(PetscVoidFunction*)(&g))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4923,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4924 *flg = PETSC_FALSE;  /* default when neither matrix provides a comparator */
 if (f && g) {
4925 if (f==g) {
4926 ierr = (*f)(A,B,tol,flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4926,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4927 } else SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_NOTSAMETYPE,"Matrices do not have the same comparator for Hermitian test")return PetscError(PetscObjectComm((PetscObject)A),4927,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",69,PETSC_ERROR_INITIAL
,"Matrices do not have the same comparator for Hermitian test"
)
;
4928 }
4929 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4930}
4931
4932/*@
4933 MatPermute - Creates a new matrix with rows and columns permuted from the
4934 original.
4935
4936 Collective on Mat
4937
4938 Input Parameters:
4939+ mat - the matrix to permute
4940. row - row permutation, each processor supplies only the permutation for its rows
4941- col - column permutation, each processor supplies only the permutation for its columns
4942
4943 Output Parameter:
4944. B - the permuted matrix
4945
4946 Level: advanced
4947
4948 Note:
4949 The index sets map from row/col of permuted matrix to row/col of original matrix.
4950 The index sets should be on the same communicator as Mat and have the same local sizes.
4951
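 Example of Usage (an illustrative sketch, not part of the original source; permutes mat symmetrically with a Reverse Cuthill-McKee ordering):
.vb
 IS  rowperm,colperm;
 Mat B;
 MatGetOrdering(mat,MATORDERINGRCM,&rowperm,&colperm);
 MatPermute(mat,rowperm,colperm,&B);
 /* ... use B ... */
 MatDestroy(&B);
 ISDestroy(&rowperm);
 ISDestroy(&colperm);
.ve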
4952.seealso: MatGetOrdering(), ISAllGather()
4953
4954@*/
4955PetscErrorCode MatPermute(Mat mat,IS row,IS col,Mat *B)
4956{
4957 PetscErrorCode ierr;
4958
4959 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4959; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4960 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),4960,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),4960,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),4960,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4960,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4961 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),4961,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
4962 PetscValidHeaderSpecific(row,IS_CLASSID,2)do { if (!row) return PetscError(((MPI_Comm)0x44000001),4962,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",2); if (
!PetscCheckPointer(row,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),4962,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(row))->classid != IS_CLASSID) { if
(((PetscObject)(row))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),4962,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),4962,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
4963 PetscValidHeaderSpecific(col,IS_CLASSID,3)do { if (!col) return PetscError(((MPI_Comm)0x44000001),4963,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(col,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),4963,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(col))->classid != IS_CLASSID) { if
(((PetscObject)(col))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),4963,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),4963,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
4964 PetscValidPointer(B,4)do { if (!B) return PetscError(((MPI_Comm)0x44000001),4964,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",4); if (!PetscCheckPointer(B,
PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),4964,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",4); } while (0)
;
4965 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),4965,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
4966 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),4966,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
4967 if (!mat->ops->permute) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatPermute not available for Mat type %s",((PetscObject)mat)->type_name)return PetscError(((MPI_Comm)0x44000001),4967,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,56,PETSC_ERROR_INITIAL,"MatPermute not available for Mat type %s"
,((PetscObject)mat)->type_name)
;
4968 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),4968,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
4969
4970 ierr = (*mat->ops->permute)(mat,row,col,B);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4970,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4971 ierr = PetscObjectStateIncrease((PetscObject)*B)(((PetscObject)*B)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),4971,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
4972 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
4973}
4974
4975/*@
4976 MatEqual - Compares two matrices.
4977
4978 Collective on Mat
4979
4980 Input Parameters:
4981+ A - the first matrix
4982- B - the second matrix
4983
4984 Output Parameter:
4985. flg - PETSC_TRUE if the matrices are equal; PETSC_FALSE otherwise.
4986
4987 Level: intermediate
4988
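 Example of Usage (an illustrative sketch, not part of the original source; A and B must be assembled, of the same type, and have the same global sizes):
.vb
 PetscBool flg;
 MatEqual(A,B,&flg);
 if (!flg) { /* the matrices differ */ }
.ve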
4989@*/
4990PetscErrorCode MatEqual(Mat A,Mat B,PetscBool *flg)
4991{
4992 PetscErrorCode ierr;
4993
4994 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 4994; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
4995 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),4995,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),4995,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),4995,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),4995,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
4996 PetscValidHeaderSpecific(B,MAT_CLASSID,2)do { if (!B) return PetscError(((MPI_Comm)0x44000001),4996,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(B,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),4996,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(B))->classid != MAT_CLASSID) { if (
((PetscObject)(B))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),4996,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),4996,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
4997 PetscValidType(A,1)do { if (!((PetscObject)A)->type_name) return PetscError((
(MPI_Comm)0x44000001),4997,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)A)->class_name,1); } while (0)
;
4998 PetscValidType(B,2)do { if (!((PetscObject)B)->type_name) return PetscError((
(MPI_Comm)0x44000001),4998,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)B)->class_name,2); } while (0)
;
4999 PetscValidBoolPointer(flg,3)do { if (!flg) return PetscError(((MPI_Comm)0x44000001),4999,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(flg,PETSC_BOOL)) return PetscError(((MPI_Comm
)0x44000001),4999,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscBool: Parameter # %d"
,3); } while (0)
;
5000 PetscCheckSameComm(A,1,B,2)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)A),PetscObjectComm((PetscObject
)B),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),5000,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),5000,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,_7_flag); } while (0)
;
5001 MatCheckPreallocated(B,2)do { if (__builtin_expect(!!(!(B)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),5001,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(2),"B",__func__); } while (0)
;
5002 if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),5002,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
5003 if (!B->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),5003,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
5004 if (A->rmap->N != B->rmap->N || A->cmap->N != B->cmap->N) SETERRQ4(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat B: global dim %D %D %D %D",A->rmap->N,B->rmap->N,A->cmap->N,B->cmap->N)return PetscError(PetscObjectComm((PetscObject)A),5004,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Mat A,Mat B: global dim %D %D %D %D",A->rmap->N,B->
rmap->N,A->cmap->N,B->cmap->N)
;
5005 if (!A->ops->equal) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type %s",((PetscObject)A)->type_name)return PetscError(PetscObjectComm((PetscObject)A),5005,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)A)->type_name)
;
5006 if (!B->ops->equal) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type %s",((PetscObject)B)->type_name)return PetscError(PetscObjectComm((PetscObject)A),5006,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)B)->type_name)
;
5007 if (A->ops->equal != B->ops->equal) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"A is type: %s\nB is type: %s",((PetscObject)A)->type_name,((PetscObject)B)->type_name)return PetscError(PetscObjectComm((PetscObject)A),5007,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",75,PETSC_ERROR_INITIAL
,"A is type: %s\nB is type: %s",((PetscObject)A)->type_name
,((PetscObject)B)->type_name)
;
5008 MatCheckPreallocated(A,1)do { if (__builtin_expect(!!(!(A)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),5008,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"A",__func__); } while (0)
;
5009
5010 ierr = (*A->ops->equal)(A,B,flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5010,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5011 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5012}
5013
5014/*@
5015 MatDiagonalScale - Scales a matrix on the left and right by diagonal
5016 matrices that are stored as vectors. Either of the two scaling
5017 matrices can be NULL.
5018
5019 Collective on Mat
5020
5021 Input Parameters:
5022+ mat - the matrix to be scaled
5023. l - the left scaling vector (or NULL)
5024- r - the right scaling vector (or NULL)
5025
5026 Notes:
5027 MatDiagonalScale() computes A = LAR, where
5028 L is a diagonal matrix (stored as a vector) and R is a diagonal matrix (stored as a vector).
5029 The L scales the rows of the matrix; the R scales the columns.
5030
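   Example of Usage:
   (a minimal sketch; A, l, and r are placeholder names for an assembled matrix and compatible vectors created elsewhere)
.vb
   MatDiagonalScale(A,l,NULL);   /* scale only the rows of A by the entries of l */
   MatDiagonalScale(A,NULL,r);   /* scale only the columns of A by the entries of r */
   MatDiagonalScale(A,l,r);      /* A = diag(l) A diag(r) */
.ve
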
5031 Level: intermediate
5032
5033
5034.seealso: MatScale(), MatShift(), MatDiagonalSet()
5035@*/
5036PetscErrorCode MatDiagonalScale(Mat mat,Vec l,Vec r)
5037{
5038 PetscErrorCode ierr;
5039
5040 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5040; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5041 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5041,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5041,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5041,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5041,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5042 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5042,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5043 if (!mat->ops->diagonalscale) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),5043,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
5044 if (l) {PetscValidHeaderSpecific(l,VEC_CLASSID,2)do { if (!l) return PetscError(((MPI_Comm)0x44000001),5044,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(l,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),5044,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(l))->classid != VEC_CLASSID) { if (
((PetscObject)(l))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),5044,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),5044,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;PetscCheckSameComm(mat,1,l,2)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)l),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),5044,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),5044,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,2,_7_flag); } while (0)
;}
5045 if (r) {PetscValidHeaderSpecific(r,VEC_CLASSID,3)do { if (!r) return PetscError(((MPI_Comm)0x44000001),5045,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(r,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),5045,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(r))->classid != VEC_CLASSID) { if (
((PetscObject)(r))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),5045,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),5045,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;PetscCheckSameComm(mat,1,r,3)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)mat),PetscObjectComm((PetscObject
)r),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0)) return
PetscError(((MPI_Comm)0x44000001),5045,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),5045,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,1,3,_7_flag); } while (0)
;}
5046 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),5046,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
5047 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),5047,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
5048 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),5048,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
5049
5050 ierr = PetscLogEventBegin(MAT_Scale,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Scale].active) ? (*PetscLogPLB)((MAT_Scale),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5050,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5051 ierr = (*mat->ops->diagonalscale)(mat,l,r);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5051,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5052 ierr = PetscLogEventEnd(MAT_Scale,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Scale].active) ? (*PetscLogPLE)((MAT_Scale),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5052,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5053 ierr = PetscObjectStateIncrease((PetscObject)mat)(((PetscObject)mat)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5053,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5054 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5055}
5056
5057/*@
5058 MatScale - Scales all elements of a matrix by a given number.
5059
5060 Logically Collective on Mat
5061
5062 Input Parameters:
5063+ mat - the matrix to be scaled
5064- a - the scaling value
5065
5066 Output Parameter:
5067. mat - the scaled matrix
5068
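   Example of Usage:
   (a minimal sketch; A is a placeholder name for an assembled matrix)
.vb
   MatScale(A,2.0);    /* multiply every stored entry of A by 2 */
   MatScale(A,-1.0);   /* negate A */
.ve
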
5069 Level: intermediate
5070
5071.seealso: MatDiagonalScale()
5072@*/
5073PetscErrorCode MatScale(Mat mat,PetscScalar a)
5074{
5075 PetscErrorCode ierr;
5076
5077 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5077; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5078 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5078,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5078,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5078,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5078,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5079 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5079,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5080 if (a != (PetscScalar)1.0 && !mat->ops->scale) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(((MPI_Comm)0x44000001),5080,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,56,PETSC_ERROR_INITIAL,"Mat type %s",((PetscObject)mat)->
type_name)
;
5081 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),5081,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
5082 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),5082,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
5083 PetscValidLogicalCollectiveScalar(mat,a,2)do { PetscErrorCode _7_ierr; PetscReal b1[5],b2[5]; if (PetscIsNanScalar
(a)) {b1[4] = 1;} else {b1[4] = 0;}; b1[0] = -(a); b1[1] = (a
); b1[2] = -((PetscReal)0); b1[3] = ((PetscReal)0); _7_ierr =
((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)mat))),0) || MPI_Allreduce((b1),(b2),(5),(((MPI_Datatype
)0x4c00080b)),((MPI_Op)(0x58000001)),(PetscObjectComm((PetscObject
)mat))));do {if (__builtin_expect(!!(_7_ierr),0)) return PetscError
(((MPI_Comm)0x44000001),5083,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (!(b2[4] >
0) && !(PetscEqualReal(-b2[0],b2[1]) && PetscEqualReal
(-b2[2],b2[3]))) return PetscError(PetscObjectComm((PetscObject
)mat),5083,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,62,PETSC_ERROR_INITIAL,"Scalar value must be same on all processes, argument # %d"
,2); } while (0)
;
5084 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),5084,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
5085
5086 ierr = PetscLogEventBegin(MAT_Scale,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Scale].active) ? (*PetscLogPLB)((MAT_Scale),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5086,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5087 if (a != (PetscScalar)1.0) {
5088 ierr = (*mat->ops->scale)(mat,a);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5088,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5089 ierr = PetscObjectStateIncrease((PetscObject)mat)(((PetscObject)mat)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5089,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5090 }
5091 ierr = PetscLogEventEnd(MAT_Scale,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Scale].active) ? (*PetscLogPLE)((MAT_Scale),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5091,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5092 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5093}
5094
5095/*@
5096 MatNorm - Calculates various norms of a matrix.
5097
5098 Collective on Mat
5099
5100 Input Parameters:
5101+ mat - the matrix
5102- type - the type of norm: NORM_1, NORM_FROBENIUS, or NORM_INFINITY
5103
5104 Output Parameter:
5105. nrm - the resulting norm
5106
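   Example of Usage:
   (a minimal sketch; A is a placeholder name for an assembled matrix)
.vb
   PetscReal nrm;
   MatNorm(A,NORM_FROBENIUS,&nrm);   /* nrm now holds the Frobenius norm of A */
.ve
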
5107 Level: intermediate
5108
5109@*/
5110PetscErrorCode MatNorm(Mat mat,NormType type,PetscReal *nrm)
5111{
5112 PetscErrorCode ierr;
5113
5114 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5114; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5115 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5115,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5115,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5115,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5115,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5116 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5116,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5117 PetscValidScalarPointer(nrm,3)do { if (!nrm) return PetscError(((MPI_Comm)0x44000001),5117,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(nrm,PETSC_DOUBLE)) return PetscError(((MPI_Comm
)0x44000001),5117,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscScalar: Parameter # %d"
,3); } while (0)
;
5118
5119 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),5119,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
5120 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),5120,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
5121 if (!mat->ops->norm) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),5121,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
5122 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),5122,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
5123
5124 ierr = (*mat->ops->norm)(mat,type,nrm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5124,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5125 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5126}
5127
5128/*
5129 This variable is used to prevent counting of MatAssemblyBegin() that
5130 are called from within a MatAssemblyEnd().
5131*/
5132static PetscInt MatAssemblyEnd_InUse = 0;
5133/*@
5134 MatAssemblyBegin - Begins assembling the matrix. This routine should
5135 be called after completing all calls to MatSetValues().
5136
5137 Collective on Mat
5138
5139 Input Parameters:
5140+ mat - the matrix
5141- type - type of assembly, either MAT_FLUSH_ASSEMBLY or MAT_FINAL_ASSEMBLY
5142
5143 Notes:
5144 MatSetValues() generally caches the values. The matrix is ready to
5145 use only after MatAssemblyBegin() and MatAssemblyEnd() have been called.
5146 Use MAT_FLUSH_ASSEMBLY when switching between ADD_VALUES and INSERT_VALUES
5147 in MatSetValues(); use MAT_FINAL_ASSEMBLY for the final assembly before
5148 using the matrix.
5149
5150 ALL processes that share a matrix MUST call MatAssemblyBegin() and MatAssemblyEnd() the SAME NUMBER of times, and each time with the
5151 same flag of MAT_FLUSH_ASSEMBLY or MAT_FINAL_ASSEMBLY for all processes. Thus you CANNOT locally change from ADD_VALUES to INSERT_VALUES; that is
5152 a global collective operation requiring all processes that share the matrix.
5153
5154 Space for preallocated nonzeros that is not filled by a call to MatSetValues() or a related routine is compressed
5155 out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5156 before MAT_FINAL_ASSEMBLY so the space is not compressed out.
5157
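   Example of Usage:
   (a minimal sketch of the standard assembly sequence; A, row, col, and v are placeholders)
.vb
   PetscInt    row = 0, col = 0;   /* global indices owned by this process */
   PetscScalar v   = 1.0;
   MatSetValues(A,1,&row,1,&col,&v,INSERT_VALUES);
   MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
   MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
.ve
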
5158 Level: beginner
5159
5160.seealso: MatAssemblyEnd(), MatSetValues(), MatAssembled()
5161@*/
5162PetscErrorCode MatAssemblyBegin(Mat mat,MatAssemblyType type)
5163{
5164 PetscErrorCode ierr;
5165
5166 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5166; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5167 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5167,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5167,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5167,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5167,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5168 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5168,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5169 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),5169,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
5170 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix.\nDid you forget to call MatSetUnfactored()?")return PetscError(PetscObjectComm((PetscObject)mat),5170,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix.\nDid you forget to call MatSetUnfactored()?"
)
;
5171 if (mat->assembled) {
5172 mat->was_assembled = PETSC_TRUE;
5173 mat->assembled = PETSC_FALSE;
5174 }
5175
5176 if (!MatAssemblyEnd_InUse) {
5177 ierr = PetscLogEventBegin(MAT_AssemblyBegin,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_AssemblyBegin].active) ? (*PetscLogPLB)((MAT_AssemblyBegin
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5177,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5178 if (mat->ops->assemblybegin) {ierr = (*mat->ops->assemblybegin)(mat,type);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5178,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
5179 ierr = PetscLogEventEnd(MAT_AssemblyBegin,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_AssemblyBegin].active) ? (*PetscLogPLE)((MAT_AssemblyBegin
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5179,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5180 } else if (mat->ops->assemblybegin) {
5181 ierr = (*mat->ops->assemblybegin)(mat,type);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5181,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5182 }
5183 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5184}
5185
5186/*@
5187 MatAssembled - Indicates if a matrix has been assembled and is ready for
5188 use; for example, in a matrix-vector product.
5189
5190 Not Collective
5191
5192 Input Parameter:
5193. mat - the matrix
5194
5195 Output Parameter:
5196. assembled - PETSC_TRUE or PETSC_FALSE
5197
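   Example of Usage:
   (a minimal sketch; A is a placeholder name for a matrix)
.vb
   PetscBool assembled;
   MatAssembled(A,&assembled);
   if (!assembled) {   /* finish assembly before using A in products */
     MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
   }
.ve
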
5198 Level: advanced
5199
5200.seealso: MatAssemblyEnd(), MatSetValues(), MatAssemblyBegin()
5201@*/
5202PetscErrorCode MatAssembled(Mat mat,PetscBool *assembled)
5203{
5204 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5204; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5205 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5205,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5205,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5205,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5205,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5206 PetscValidPointer(assembled,2)do { if (!assembled) return PetscError(((MPI_Comm)0x44000001)
,5206,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",2); if
(!PetscCheckPointer(assembled,PETSC_CHAR)) return PetscError
(((MPI_Comm)0x44000001),5206,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",2);
} while (0)
;
5207 *assembled = mat->assembled;
5208 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5209}
5210
5211/*@
5212 MatAssemblyEnd - Completes assembling the matrix. This routine should
5213 be called after MatAssemblyBegin().
5214
5215 Collective on Mat
5216
5217 Input Parameters:
5218+ mat - the matrix
5219- type - type of assembly, either MAT_FLUSH_ASSEMBLY or MAT_FINAL_ASSEMBLY
5220
5221 Options Database Keys:
5222+ -mat_view ::ascii_info - Prints info on matrix at conclusion of MatAssemblyEnd()
5223. -mat_view ::ascii_info_detail - Prints more detailed info
5224. -mat_view - Prints matrix in ASCII format
5225. -mat_view ::ascii_matlab - Prints matrix in Matlab format
5226. -mat_view draw - PetscDraws nonzero structure of matrix, using MatView() and PetscDrawOpenX().
5227. -display <name> - Sets display name (default is host)
5228. -draw_pause <sec> - Sets number of seconds to pause after display
5229. -mat_view socket - Sends matrix to socket, can be accessed from Matlab (See Users-Manual: ch_matlab )
5230. -viewer_socket_machine <machine> - Machine to use for socket
5231. -viewer_socket_port <port> - Port number to use for socket
5232- -mat_view binary:filename[:append] - Save matrix to file in binary format
5233
5234 Notes:
5235 MatSetValues() generally caches the values. The matrix is ready to
5236 use only after MatAssemblyBegin() and MatAssemblyEnd() have been called.
5237 Use MAT_FLUSH_ASSEMBLY when switching between ADD_VALUES and INSERT_VALUES
5238 in MatSetValues(); use MAT_FINAL_ASSEMBLY for the final assembly before
5239 using the matrix.
5240
5241 Space for preallocated nonzeros that is not filled by a call to MatSetValues() or a related routine is compressed
5242 out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5243 before MAT_FINAL_ASSEMBLY so the space is not compressed out.
5244
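   Example of Usage:
   (a minimal sketch of switching insert modes with a flush assembly; A, row, col, v, and w are placeholders)
.vb
   MatSetValues(A,1,&row,1,&col,&v,INSERT_VALUES);
   MatAssemblyBegin(A,MAT_FLUSH_ASSEMBLY);   /* flush before changing the insert mode */
   MatAssemblyEnd(A,MAT_FLUSH_ASSEMBLY);
   MatSetValues(A,1,&row,1,&col,&w,ADD_VALUES);
   MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);   /* final assembly before using A */
   MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
.ve
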
5245 Level: beginner
5246
5247.seealso: MatAssemblyBegin(), MatSetValues(), PetscDrawOpenX(), PetscDrawCreate(), MatView(), MatAssembled(), PetscViewerSocketOpen()
5248@*/
5249PetscErrorCode MatAssemblyEnd(Mat mat,MatAssemblyType type)
5250{
5251 PetscErrorCode ierr;
5252 static PetscInt inassm = 0;
5253 PetscBool flg = PETSC_FALSE;
5254
5255 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5255; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5256 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5256,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5256,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5256,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5256,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5257 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5257,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5258
5259 inassm++;
5260 MatAssemblyEnd_InUse++;
5261 if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5262 ierr = PetscLogEventBegin(MAT_AssemblyEnd,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_AssemblyEnd].active) ? (*PetscLogPLB)((MAT_AssemblyEnd),
0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5262,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5263 if (mat->ops->assemblyend) {
5264 ierr = (*mat->ops->assemblyend)(mat,type);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5264,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5265 }
5266 ierr = PetscLogEventEnd(MAT_AssemblyEnd,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_AssemblyEnd].active) ? (*PetscLogPLE)((MAT_AssemblyEnd),
0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5266,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5267 } else if (mat->ops->assemblyend) {
5268 ierr = (*mat->ops->assemblyend)(mat,type);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5268,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5269 }
5270
5271 /* Flush assembly is not a true assembly */
5272 if (type != MAT_FLUSH_ASSEMBLY) {
5273 mat->num_ass++;
5274 mat->assembled = PETSC_TRUE;
5275 mat->ass_nonzerostate = mat->nonzerostate;
5276 }
5277
5278 mat->insertmode = NOT_SET_VALUES;
5279 MatAssemblyEnd_InUse--;
5280 ierr = PetscObjectStateIncrease((PetscObject)mat)(((PetscObject)mat)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5280,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5281 if (!mat->symmetric_eternal) {
5282 mat->symmetric_set = PETSC_FALSE;
5283 mat->hermitian_set = PETSC_FALSE;
5284 mat->structurally_symmetric_set = PETSC_FALSE;
5285 }
5286 if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5287 ierr = MatViewFromOptions(mat,NULL((void*)0),"-mat_view");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5287,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5288
5289 if (mat->checksymmetryonassembly) {
5290 ierr = MatIsSymmetric(mat,mat->checksymmetrytol,&flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5290,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5291 if (flg) {
5292 ierr = PetscPrintf(PetscObjectComm((PetscObject)mat),"Matrix is symmetric (tolerance %g)\n",(double)mat->checksymmetrytol);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5292,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5293 } else {
5294 ierr = PetscPrintf(PetscObjectComm((PetscObject)mat),"Matrix is not symmetric (tolerance %g)\n",(double)mat->checksymmetrytol);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5294,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5295 }
5296 }
5297 if (mat->nullsp && mat->checknullspaceonassembly) {
5298 ierr = MatNullSpaceTest(mat->nullsp,mat,NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5298,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5299 }
5300 }
5301 inassm--;
5302 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5303}
5304
5305/*@
5306 MatSetOption - Sets a parameter option for a matrix. Some options
5307 may be specific to certain storage formats. Some options
5308 determine how values will be inserted (or added). Sorted,
5309 row-oriented input will generally assemble the fastest. The default
5310 is row-oriented.
5311
5312 Logically Collective on Mat for certain operations, such as MAT_SPD; not collective for others, such as MAT_ROW_ORIENTED (see MatOption)
5313
5314 Input Parameters:
5315+ mat - the matrix
5316. option - the option, one of those listed below (and possibly others),
5317- flg - turn the option on (PETSC_TRUE) or off (PETSC_FALSE)
5318
5319 Options Describing Matrix Structure:
5320+ MAT_SPD - symmetric positive definite
5321. MAT_SYMMETRIC - symmetric in terms of both structure and value
5322. MAT_HERMITIAN - transpose is the complex conjugation
5323. MAT_STRUCTURALLY_SYMMETRIC - symmetric nonzero structure
5324- MAT_SYMMETRY_ETERNAL - if you would like the symmetry/Hermitian flag
5325 you set to be kept with all future use of the matrix
5326 including after MatAssemblyBegin/End() which could
5327 potentially change the symmetry structure, i.e. you
5328 KNOW the matrix will ALWAYS have the property you set.
5329
5330
5331 Options For Use with MatSetValues():
5332 Insert a logically dense subblock, which can be
5333. MAT_ROW_ORIENTED - row-oriented (default)
5334
5335 Note these options reflect the data you pass in with MatSetValues(); they have
5336 nothing to do with how the data is stored internally in the matrix
5337 data structure.
5338
5339 When (re)assembling a matrix, we can restrict the input for
5340 efficiency/debugging purposes. These options include:
5341+ MAT_NEW_NONZERO_LOCATIONS - additional insertions will be allowed if they generate a new nonzero (slow)
5342. MAT_NEW_DIAGONALS - new diagonals will be allowed (for block diagonal format only)
5343. MAT_IGNORE_OFF_PROC_ENTRIES - drops off-processor entries
5344. MAT_NEW_NONZERO_LOCATION_ERR - generates an error for new matrix entry
5345. MAT_USE_HASH_TABLE - uses a hash table to speed up matrix assembly
5346. MAT_NO_OFF_PROC_ENTRIES - you know each process will only set values for its own rows; an error is generated if
5347 any process sets values for another process's rows. This avoids all reductions in the MatAssembly routines and thus improves
5348 performance for very large process counts.
5349- MAT_SUBSET_OFF_PROC_ENTRIES - you know that the first assembly after setting this flag will set a superset
5350 of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5351 functions, instead sending only neighbor messages.
5352
5353 Notes:
5354 Except for MAT_UNUSED_NONZERO_LOCATION_ERR and MAT_ROW_ORIENTED, all processes that share the matrix must pass the same value in flg!
5355
5356 Some options are relevant only for particular matrix types and
5357 are thus ignored by others. Other options are not supported by
5358 certain matrix types and will generate an error message if set.
5359
5360 If using a Fortran 77 module to compute a matrix, one may need to
5361 use the column-oriented option (or convert to the row-oriented
5362 format).
5363
5364 MAT_NEW_NONZERO_LOCATIONS set to PETSC_FALSE indicates that any add or insertion
5365 that would generate a new entry in the nonzero structure is instead
5366 ignored. Thus, if memory has not already been allocated for this particular
5367 data, then the insertion is ignored. For dense matrices, in which
5368 the entire array is allocated, no entries are ever ignored.
5369 Set after the first MatAssemblyEnd(). If this option is set, the MatAssemblyBegin/End() process requires one fewer global reduction
5370
5371 MAT_NEW_NONZERO_LOCATION_ERR set to PETSC_TRUE indicates that any add or insertion
5372 that would generate a new entry in the nonzero structure instead produces
5373 an error. (Currently supported for AIJ and BAIJ formats only.) If this option is set, the MatAssemblyBegin/End() process requires one fewer global reduction
5374
5375 MAT_NEW_NONZERO_ALLOCATION_ERR set to PETSC_TRUE indicates that any add or insertion
5376 that would generate a new entry that has not been preallocated will
5377 instead produce an error. (Currently supported for AIJ and BAIJ formats
5378 only.) This is a useful flag when debugging matrix memory preallocation.
5379 If this option is set, the MatAssemblyBegin/End() process requires one fewer global reduction
5380
5381 MAT_IGNORE_OFF_PROC_ENTRIES set to PETSC_TRUE indicates entries destined for
5382 other processors should be dropped, rather than stashed.
5383 This is useful if you know that the "owning" processor is also
5384 always generating the correct matrix entries, so that PETSc need
5385 not transfer duplicate entries generated on another processor.
5386
5387 MAT_USE_HASH_TABLE indicates that a hash table be used to improve the
5388 searches during matrix assembly. When this flag is set, the hash table
5389 is created during the first Matrix Assembly. This hash table is
5390 used the next time through, during MatSetValues()/MatSetValuesBlocked()
5391 to improve the searching of indices. MAT_NEW_NONZERO_LOCATIONS flag
5392 should be used with MAT_USE_HASH_TABLE flag. This option is currently
5393 supported by MATMPIBAIJ format only.
5394
5395 MAT_KEEP_NONZERO_PATTERN indicates that when MatZeroRows() is called the zeroed entries
5396 are kept in the nonzero structure
5397
5398 MAT_IGNORE_ZERO_ENTRIES - for AIJ/IS matrices this will stop zero values from creating
5399 a zero location in the matrix
5400
5401 MAT_USE_INODES - indicates using inode version of the code - works with AIJ matrix types
5402
5403 MAT_NO_OFF_PROC_ZERO_ROWS - you know each process will only zero its own rows. This avoids all reductions in the
5404 zero row routines and thus improves performance for very large process counts.
5405
5406 MAT_IGNORE_LOWER_TRIANGULAR - For SBAIJ matrices will ignore any insertions you make in the lower triangular
5407 part of the matrix (since they should match the upper triangular part).
5408
5409 MAT_SORTED_FULL - each process provides exactly its local rows; all column indices for a given row are passed in a
5410 single call to MatSetValues(), preallocation is perfect, row oriented, INSERT_VALUES is used. Common
5411 with finite difference schemes with non-periodic boundary conditions.
5412
5413 Can only be called after MatSetSizes() and MatSetType() have been called.
5414
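   Example of Usage:
   (a minimal sketch; A is a placeholder for a matrix whose type and sizes have been set)
.vb
   MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);                  /* assert the matrix is symmetric */
   MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE); /* error on entries that were not preallocated */
.ve
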
5415 Level: intermediate
5416
5417.seealso: MatOption, Mat
5418
5419@*/
5420PetscErrorCode MatSetOption(Mat mat,MatOption op,PetscBool flg)
5421{
5422 PetscErrorCode ierr;
5423
5424 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5424; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5425 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5425,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5425,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5425,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5425,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5426 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5426,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5427 if (op > 0) {
5428 PetscValidLogicalCollectiveEnum(mat,op,2)do { PetscErrorCode _7_ierr; PetscMPIInt b1[2],b2[2]; b1[0] =
-(PetscMPIInt)op; b1[1] = (PetscMPIInt)op; _7_ierr = (PetscAllreduceBarrierCheck
(PetscObjectComm((PetscObject)mat),2,5428,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)mat))),0) || MPI_Allreduce((b1),(b2),(2),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000001)),(PetscObjectComm((PetscObject
)mat)))));do {if (__builtin_expect(!!(_7_ierr),0)) return PetscError
(((MPI_Comm)0x44000001),5428,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (-b2[0] != b2
[1]) return PetscError(PetscObjectComm((PetscObject)mat),5428
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,62,PETSC_ERROR_INITIAL,"Enum value must be same on all processes, argument # %d"
,2); } while (0)
;
5429 PetscValidLogicalCollectiveBool(mat,flg,3)do { PetscErrorCode _7_ierr; PetscMPIInt b1[2],b2[2]; b1[0] =
-(PetscMPIInt)flg; b1[1] = (PetscMPIInt)flg; _7_ierr = (PetscAllreduceBarrierCheck
(PetscObjectComm((PetscObject)mat),2,5429,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)mat))),0) || MPI_Allreduce((b1),(b2),(2),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000001)),(PetscObjectComm((PetscObject
)mat)))));do {if (__builtin_expect(!!(_7_ierr),0)) return PetscError
(((MPI_Comm)0x44000001),5429,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (-b2[0] != b2
[1]) return PetscError(PetscObjectComm((PetscObject)mat),5429
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,62,PETSC_ERROR_INITIAL,"Bool value must be same on all processes, argument # %d"
,3); } while (0)
;
5430 }
5431
5432 if (((int) op) <= MAT_OPTION_MIN || ((int) op) >= MAT_OPTION_MAX) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Option %d is out of range",(int)op)return PetscError(PetscObjectComm((PetscObject)mat),5432,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",63,PETSC_ERROR_INITIAL
,"Options %d is out of range",(int)op)
;
5433 if (!((PetscObject)mat)->type_name) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_TYPENOTSET,"Cannot set options until type and size have been set, see MatSetType() and MatSetSizes()")return PetscError(PetscObjectComm((PetscObject)mat),5433,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",89,PETSC_ERROR_INITIAL
,"Cannot set options until type and size have been set, see MatSetType() and MatSetSizes()"
)
;
5434
5435 switch (op) {
5436 case MAT_NO_OFF_PROC_ENTRIES:
5437 mat->nooffprocentries = flg;
5438 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5439 break;
5440 case MAT_SUBSET_OFF_PROC_ENTRIES:
5441 mat->assembly_subset = flg;
5442 if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
5443#if !defined(PETSC_HAVE_MPIUNI)
5444 ierr = MatStashScatterDestroy_BTS(&mat->stash);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5444,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5445#endif
5446 mat->stash.first_assembly_done = PETSC_FALSE;
5447 }
5448 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5449 case MAT_NO_OFF_PROC_ZERO_ROWS:
5450 mat->nooffproczerorows = flg;
5451 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5452 break;
5453 case MAT_SPD:
5454 mat->spd_set = PETSC_TRUE;
5455 mat->spd = flg;
5456 if (flg) {
5457 mat->symmetric = PETSC_TRUE;
5458 mat->structurally_symmetric = PETSC_TRUE;
5459 mat->symmetric_set = PETSC_TRUE;
5460 mat->structurally_symmetric_set = PETSC_TRUE;
5461 }
5462 break;
5463 case MAT_SYMMETRIC:
5464 mat->symmetric = flg;
5465 if (flg) mat->structurally_symmetric = PETSC_TRUE;
5466 mat->symmetric_set = PETSC_TRUE;
5467 mat->structurally_symmetric_set = flg;
5468#if !defined(PETSC_USE_COMPLEX)
5469 mat->hermitian = flg;
5470 mat->hermitian_set = PETSC_TRUE;
5471#endif
5472 break;
5473 case MAT_HERMITIAN:
5474 mat->hermitian = flg;
5475 if (flg) mat->structurally_symmetric = PETSC_TRUE;
5476 mat->hermitian_set = PETSC_TRUE;
5477 mat->structurally_symmetric_set = flg;
5478#if !defined(PETSC_USE_COMPLEX)
5479 mat->symmetric = flg;
5480 mat->symmetric_set = PETSC_TRUE;
5481#endif
5482 break;
5483 case MAT_STRUCTURALLY_SYMMETRIC:
5484 mat->structurally_symmetric = flg;
5485 mat->structurally_symmetric_set = PETSC_TRUE;
5486 break;
5487 case MAT_SYMMETRY_ETERNAL:
5488 mat->symmetric_eternal = flg;
5489 break;
5490 case MAT_STRUCTURE_ONLY:
5491 mat->structure_only = flg;
5492 break;
5493 case MAT_SORTED_FULL:
5494 mat->sortedfull = flg;
5495 break;
5496 default:
5497 break;
5498 }
5499 if (mat->ops->setoption) {
5500 ierr = (*mat->ops->setoption)(mat,op,flg);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5500,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5501 }
5502 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5503}
5504
5505/*@
5506 MatGetOption - Gets a parameter option that has been set for a matrix.
5507
5508 Logically Collective on Mat for certain operations, such as MAT_SPD; not collective for others, such as MAT_ROW_ORIENTED (see MatOption)
5509
5510 Input Parameters:
5511+ mat - the matrix
5512- option - the option, this only responds to certain options, check the code for which ones
5513
5514 Output Parameter:
5515. flg - turn the option on (PETSC_TRUE) or off (PETSC_FALSE)
5516
5517 Notes:
5518 Can only be called after MatSetSizes() and MatSetType() have been called.
5519
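   Example of Usage:
   (a minimal sketch; A is a placeholder name for a matrix)
.vb
   PetscBool symm;
   MatGetOption(A,MAT_SYMMETRIC,&symm);   /* symm is PETSC_TRUE if the symmetry flag was set */
.ve
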
5520 Level: intermediate
5521
5522.seealso: MatOption, MatSetOption()
5523
5524@*/
5525PetscErrorCode MatGetOption(Mat mat,MatOption op,PetscBool *flg)
5526{
5527 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5527; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5528 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5528,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5528,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5528,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5528,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5529 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5529,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5530
5531 if (((int) op) <= MAT_OPTION_MIN || ((int) op) >= MAT_OPTION_MAX) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Options %d is out of range",(int)op)return PetscError(PetscObjectComm((PetscObject)mat),5531,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",63,PETSC_ERROR_INITIAL
,"Options %d is out of range",(int)op)
;
5532 if (!((PetscObject)mat)->type_name) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_TYPENOTSET,"Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()")return PetscError(PetscObjectComm((PetscObject)mat),5532,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",89,PETSC_ERROR_INITIAL
,"Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()"
)
;
5533
5534 switch (op) {
5535 case MAT_NO_OFF_PROC_ENTRIES:
5536 *flg = mat->nooffprocentries;
5537 break;
5538 case MAT_NO_OFF_PROC_ZERO_ROWS:
5539 *flg = mat->nooffproczerorows;
5540 break;
5541 case MAT_SYMMETRIC:
5542 *flg = mat->symmetric;
5543 break;
5544 case MAT_HERMITIAN:
5545 *flg = mat->hermitian;
5546 break;
5547 case MAT_STRUCTURALLY_SYMMETRIC:
5548 *flg = mat->structurally_symmetric;
5549 break;
5550 case MAT_SYMMETRY_ETERNAL:
5551 *flg = mat->symmetric_eternal;
5552 break;
5553 case MAT_SPD:
5554 *flg = mat->spd;
5555 break;
5556 default:
5557 break;
5558 }
5559 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5560}
5561
5562/*@
5563 MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
5564 this routine retains the old nonzero structure.
5565
5566 Logically Collective on Mat
5567
5568 Input Parameter:
5569. mat - the matrix
5570
5571 Level: intermediate
5572
5573 Notes:
5574 If the matrix was not preallocated then a default, likely poor, preallocation will be set in the matrix, so this routine should be called after the preallocation phase.
5575 See the Performance chapter of the users manual for information on preallocating matrices.
5576
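 Example of Usage (a sketch; assumes A is an assembled, preallocated matrix, and row, col, v are illustrative names):
.vb
 MatZeroEntries(A);                                /* zero the values, keep the nonzero pattern */
 MatSetValues(A,1,&row,1,&col,&v,INSERT_VALUES);   /* refill the same pattern */
 MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
 MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
.ve
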
5577.seealso: MatZeroRows()
5578@*/
5579PetscErrorCode MatZeroEntries(Mat mat)
5580{
5581 PetscErrorCode ierr;
5582
5583 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5583; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5584 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5584,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5584,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5584,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5584,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5585 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5585,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5586 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),5586,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
5587 if (mat->insertmode != NOT_SET_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for matrices where you have set values but not yet assembled")return PetscError(((MPI_Comm)0x44000001),5587,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Not for matrices where you have set values but not yet assembled"
)
;
5588 if (!mat->ops->zeroentries) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),5588,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
5589 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),5589,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
5590
5591 ierr = PetscLogEventBegin(MAT_ZeroEntries,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_ZeroEntries].active) ? (*PetscLogPLB)((MAT_ZeroEntries),
0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5591,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5592 ierr = (*mat->ops->zeroentries)(mat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5592,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5593 ierr = PetscLogEventEnd(MAT_ZeroEntries,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_ZeroEntries].active) ? (*PetscLogPLE)((MAT_ZeroEntries),
0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5593,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5594 ierr = PetscObjectStateIncrease((PetscObject)mat)(((PetscObject)mat)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5594,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5595 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5596}
5597
5598/*@
5599 MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
5600 of a set of rows and columns of a matrix.
5601
5602 Collective on Mat
5603
5604 Input Parameters:
5605+ mat - the matrix
5606. numRows - the number of rows to remove
5607. rows - the global row indices
5608. diag - value put in all diagonals of eliminated rows (0.0 will even eliminate the diagonal entry)
5609. x - optional vector of solutions for zeroed rows (other entries in the vector are not used)
5610- b - optional right-hand-side vector that will be adjusted by the provided solution
5611
5612 Notes:
5613 This does not change the nonzero structure of the matrix; it merely zeros those entries in the matrix.
5614
5615 The user can set a value in the diagonal entry (or, for the AIJ and
5616 row formats, can optionally remove the main diagonal entry from the
5617 nonzero structure as well by passing 0.0 as the diag argument).
5618
5619 For the parallel case, all processes that share the matrix (i.e.,
5620 those in the communicator used for matrix creation) MUST call this
5621 routine, regardless of whether any rows being zeroed are owned by
5622 them.
5623
5624 Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
5625 list only rows local to itself).
5626
5627 The option MAT_NO_OFF_PROC_ZERO_ROWS does not apply to this routine.
5628
5629 Level: intermediate
5630
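 Example of Usage (a sketch that imposes Dirichlet conditions while keeping the matrix symmetric; rows, x, and b are illustrative):
.vb
 PetscInt rows[] = {0,5};
 MatZeroRowsColumns(A,2,rows,1.0,x,b);   /* 1.0 on each diagonal; b adjusted using x */
.ve
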
5631.seealso: MatZeroRowsIS(), MatZeroRows(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
5632 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
5633@*/
5634PetscErrorCode MatZeroRowsColumns(Mat mat,PetscInt numRows,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
5635{
5636 PetscErrorCode ierr;
5637
5638 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5638; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5639 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5639,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5639,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5639,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5639,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5640 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5640,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5641 if (numRows) PetscValidIntPointer(rows,3)do { if (!rows) return PetscError(((MPI_Comm)0x44000001),5641
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(rows,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),5641,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscInt: Parameter # %d"
,3); } while (0)
;
5642 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),5642,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
5643 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),5643,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
5644 if (!mat->ops->zerorowscolumns) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),5644,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
5645 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),5645,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
5646
5647 ierr = (*mat->ops->zerorowscolumns)(mat,numRows,rows,diag,x,b);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5647,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5648 ierr = MatViewFromOptions(mat,NULL((void*)0),"-mat_view");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5648,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5649 ierr = PetscObjectStateIncrease((PetscObject)mat)(((PetscObject)mat)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5649,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5650 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5651}
5652
5653/*@
5654 MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
5655 of a set of rows and columns of a matrix.
5656
5657 Collective on Mat
5658
5659 Input Parameters:
5660+ mat - the matrix
5661. is - the rows to zero
5662. diag - value put in all diagonals of eliminated rows (0.0 will even eliminate the diagonal entry)
5663. x - optional vector of solutions for zeroed rows (other entries in the vector are not used)
5664- b - optional right-hand-side vector that will be adjusted by the provided solution
5665
5666 Notes:
5667 This does not change the nonzero structure of the matrix; it merely zeros those entries in the matrix.
5668
5669 The user can set a value in the diagonal entry (or, for the AIJ and
5670 row formats, can optionally remove the main diagonal entry from the
5671 nonzero structure as well by passing 0.0 as the diag argument).
5672
5673 For the parallel case, all processes that share the matrix (i.e.,
5674 those in the communicator used for matrix creation) MUST call this
5675 routine, regardless of whether any rows being zeroed are owned by
5676 them.
5677
5678 Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
5679 list only rows local to itself).
5680
5681 The option MAT_NO_OFF_PROC_ZERO_ROWS does not apply to this routine.
5682
5683 Level: intermediate
5684
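 Example of Usage (a sketch that eliminates the first nb global rows/columns; nb, x, and b are illustrative):
.vb
 IS is;
 ISCreateStride(PETSC_COMM_WORLD,nb,0,1,&is);
 MatZeroRowsColumnsIS(A,is,1.0,x,b);
 ISDestroy(&is);
.ve
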
5685.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
5686 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRows(), MatZeroRowsColumnsStencil()
5687@*/
5688PetscErrorCode MatZeroRowsColumnsIS(Mat mat,IS is,PetscScalar diag,Vec x,Vec b)
5689{
5690 PetscErrorCode ierr;
5691 PetscInt numRows;
5692 const PetscInt *rows;
5693
5694 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5694; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5695 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5695,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5695,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5695,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5695,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5696 PetscValidHeaderSpecific(is,IS_CLASSID,2)do { if (!is) return PetscError(((MPI_Comm)0x44000001),5696,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(is,
PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),5696,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(is))->classid != IS_CLASSID) { if (
((PetscObject)(is))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),5696,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),5696,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
5697 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5697,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5698 PetscValidType(is,2)do { if (!((PetscObject)is)->type_name) return PetscError(
((MPI_Comm)0x44000001),5698,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)is)->class_name,2); } while (0)
;
5699 ierr = ISGetLocalSize(is,&numRows);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5699,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5700 ierr = ISGetIndices(is,&rows);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5700,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5701 ierr = MatZeroRowsColumns(mat,numRows,rows,diag,x,b);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5701,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5702 ierr = ISRestoreIndices(is,&rows);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5702,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5703 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5704}
5705
5706/*@
5707 MatZeroRows - Zeros all entries (except possibly the main diagonal)
5708 of a set of rows of a matrix.
5709
5710 Collective on Mat
5711
5712 Input Parameters:
5713+ mat - the matrix
5714. numRows - the number of rows to remove
5715. rows - the global row indices
5716. diag - value put in all diagonals of eliminated rows (0.0 will even eliminate the diagonal entry)
5717. x - optional vector of solutions for zeroed rows (other entries in the vector are not used)
5718- b - optional right-hand-side vector that will be adjusted by the provided solution
5719
5720 Notes:
5721 For the AIJ and BAIJ matrix formats this removes the old nonzero structure,
5722 but does not release memory. For the dense and block diagonal
5723 formats this does not alter the nonzero structure.
5724
5725 If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) has been set, the nonzero structure
5726 of the matrix is not changed (even for AIJ and BAIJ matrices); the values are
5727 merely zeroed.
5728
5729 The user can set a value in the diagonal entry (or, for the AIJ and
5730 row formats, can optionally remove the main diagonal entry from the
5731 nonzero structure as well by passing 0.0 as the diag argument).
5732
5733 For the parallel case, all processes that share the matrix (i.e.,
5734 those in the communicator used for matrix creation) MUST call this
5735 routine, regardless of whether any rows being zeroed are owned by
5736 them.
5737
5738 Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
5739 list only rows local to itself).
5740
5741 You can call MatSetOption(mat,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) if each process indicates only rows it
5742 owns that are to be zeroed. This saves a global synchronization in the implementation.
5743
5744 Level: intermediate
5745
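 Example of Usage (a sketch that zeros two boundary rows of an assembled matrix; n, x, and b are illustrative):
.vb
 PetscInt rows[] = {0,n-1};
 MatZeroRows(A,2,rows,1.0,x,b);   /* put 1.0 on the diagonal of each zeroed row */
.ve
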
5746.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
5747 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
5748@*/
5749PetscErrorCode MatZeroRows(Mat mat,PetscInt numRows,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
5750{
5751 PetscErrorCode ierr;
5752
5753 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5753; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5754 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5754,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5754,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5754,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5754,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5755 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5755,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5756 if (numRows) PetscValidIntPointer(rows,3)do { if (!rows) return PetscError(((MPI_Comm)0x44000001),5756
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(rows,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),5756,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscInt: Parameter # %d"
,3); } while (0)
;
5757 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),5757,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
5758 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),5758,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
5759 if (!mat->ops->zerorows) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(PetscObjectComm((PetscObject)mat),5759,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Mat type %s",((PetscObject)mat)->type_name)
;
5760 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),5760,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
5761
5762 ierr = (*mat->ops->zerorows)(mat,numRows,rows,diag,x,b);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5762,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5763 ierr = MatViewFromOptions(mat,NULL((void*)0),"-mat_view");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5763,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5764 ierr = PetscObjectStateIncrease((PetscObject)mat)(((PetscObject)mat)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5764,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5765 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5766}
5767
5768/*@
5769 MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
5770 of a set of rows of a matrix.
5771
5772 Collective on Mat
5773
5774 Input Parameters:
5775+ mat - the matrix
5776. is - index set of rows to remove
5777. diag - value put in all diagonals of eliminated rows
5778. x - optional vector of solutions for zeroed rows (other entries in the vector are not used)
5779- b - optional right-hand-side vector that will be adjusted by the provided solution
5780
5781 Notes:
5782 For the AIJ and BAIJ matrix formats this removes the old nonzero structure,
5783 but does not release memory. For the dense and block diagonal
5784 formats this does not alter the nonzero structure.
5785
5786 If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) has been set, the nonzero structure
5787 of the matrix is not changed (even for AIJ and BAIJ matrices); the values are
5788 merely zeroed.
5789
5790 The user can set a value in the diagonal entry (or, for the AIJ and
5791 row formats, can optionally remove the main diagonal entry from the
5792 nonzero structure as well by passing 0.0 as the diag argument).
5793
5794 For the parallel case, all processes that share the matrix (i.e.,
5795 those in the communicator used for matrix creation) MUST call this
5796 routine, regardless of whether any rows being zeroed are owned by
5797 them.
5798
5799 Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
5800 list only rows local to itself).
5801
5802 You can call MatSetOption(mat,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) if each process indicates only rows it
5803 owns that are to be zeroed. This saves a global synchronization in the implementation.
5804
5805 Level: intermediate
5806
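 Example of Usage (a sketch; bcrows holds nb global row indices and, like x and b, is an illustrative name):
.vb
 IS is;
 ISCreateGeneral(PETSC_COMM_SELF,nb,bcrows,PETSC_COPY_VALUES,&is);
 MatZeroRowsIS(A,is,1.0,x,b);
 ISDestroy(&is);
.ve
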
5807.seealso: MatZeroRows(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
5808 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
5809@*/
5810PetscErrorCode MatZeroRowsIS(Mat mat,IS is,PetscScalar diag,Vec x,Vec b)
5811{
5812 PetscInt numRows;
5813 const PetscInt *rows;
5814 PetscErrorCode ierr;
5815
5816 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5816; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5817 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5817,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5817,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5817,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5817,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5818 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5818,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5819 PetscValidHeaderSpecific(is,IS_CLASSID,2)do { if (!is) return PetscError(((MPI_Comm)0x44000001),5819,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(is,
PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),5819,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(is))->classid != IS_CLASSID) { if (
((PetscObject)(is))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),5819,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),5819,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
5820 ierr = ISGetLocalSize(is,&numRows);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5820,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5821 ierr = ISGetIndices(is,&rows);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5821,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5822 ierr = MatZeroRows(mat,numRows,rows,diag,x,b);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5822,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5823 ierr = ISRestoreIndices(is,&rows);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5823,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5824 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5825}
5826
5827/*@
5828 MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
5829 of a set of rows of a matrix. These rows must be local to the process.
5830
5831 Collective on Mat
5832
5833 Input Parameters:
5834+ mat - the matrix
5835. numRows - the number of rows to remove
5836. rows - the grid coordinates (and component number when dof > 1) for matrix rows
5837. diag - value put in all diagonals of eliminated rows (0.0 will even eliminate the diagonal entry)
5838. x - optional vector of solutions for zeroed rows (other entries in the vector are not used)
5839- b - optional right-hand-side vector that will be adjusted by the provided solution
5840
5841 Notes:
5842 For the AIJ and BAIJ matrix formats this removes the old nonzero structure,
5843 but does not release memory. For the dense and block diagonal
5844 formats this does not alter the nonzero structure.
5845
5846 If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) has been set, the nonzero structure
5847 of the matrix is not changed (even for AIJ and BAIJ matrices); the values are
5848 merely zeroed.
5849
5850 The user can set a value in the diagonal entry (or, for the AIJ and
5851 row formats, can optionally remove the main diagonal entry from the
5852 nonzero structure as well by passing 0.0 as the diag argument).
5853
5854 For the parallel case, all processes that share the matrix (i.e.,
5855 those in the communicator used for matrix creation) MUST call this
5856 routine, regardless of whether any rows being zeroed are owned by
5857 them.
5858
5859 Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
5860 list only rows local to itself).
5861
5862 The grid coordinates are across the entire grid, not just the local portion
5863
5864 In Fortran idxm should be declared as
5865$ MatStencil idxm(4,m)
5866 and the values inserted using
5867$ idxm(MatStencil_i,1) = i
5868$ idxm(MatStencil_j,1) = j
5869$ idxm(MatStencil_k,1) = k
5870$ idxm(MatStencil_c,1) = c
5871 etc
5872
5873 For periodic boundary conditions use negative indices for values to the left (below 0), which are
5874 obtained by wrapping around from the right edge. For values to the right of the last entry, use that index plus one,
5875 etc., to obtain values wrapped around from the left edge. This works only with the
5876 DM_BOUNDARY_PERIODIC boundary type.
5877
5878 For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
5879 a single value per point) you can skip filling those indices.
5880
5881 Level: intermediate
5882
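 Example of Usage (a sketch that zeros the row for grid point (i,j) of a 2d grid with one dof per point; i, j, x, and b are illustrative):
.vb
 MatStencil row = {0};   /* zero-initialize so unused slots are defined */
 row.i = i; row.j = j;
 MatZeroRowsStencil(A,1,&row,1.0,x,b);
.ve
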
5883.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRows(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
5884 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
5885@*/
5886PetscErrorCode MatZeroRowsStencil(Mat mat,PetscInt numRows,const MatStencil rows[],PetscScalar diag,Vec x,Vec b)
5887{
5888 PetscInt dim = mat->stencil.dim;
5889 PetscInt sdim = dim - (1 - (PetscInt) mat->stencil.noc);
5890 PetscInt *dims = mat->stencil.dims+1;
5891 PetscInt *starts = mat->stencil.starts;
5892 PetscInt *dxm = (PetscInt*) rows;
5893 PetscInt *jdxm, i, j, tmp, numNewRows = 0;
5894 PetscErrorCode ierr;
5895
5896 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5896; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5897 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5897,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5897,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5897,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5897,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5898 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5898,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5899 if (numRows) PetscValidIntPointer(rows,3)do { if (!rows) return PetscError(((MPI_Comm)0x44000001),5899
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(rows,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),5899,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscInt: Parameter # %d"
,3); } while (0)
;
5900
5901 ierr = PetscMalloc1(numRows, &jdxm)PetscMallocA(1,PETSC_FALSE,5901,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,(size_t)(numRows)*sizeof(**(&jdxm)),(&jdxm))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5901,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5902 for (i = 0; i < numRows; ++i) {
5903 /* Skip unused dimensions (they are ordered k, j, i, c) */
5904 for (j = 0; j < 3-sdim; ++j) dxm++;
5905 /* Local index in X dir */
5906 tmp = *dxm++ - starts[0];
5907 /* Loop over remaining dimensions */
5908 for (j = 0; j < dim-1; ++j) {
5909 /* If nonlocal, set index to be negative */
5910 if ((*dxm++ - starts[j+1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT(-2147483647 - 1);
5911 /* Update local index */
5912 else tmp = tmp*dims[j] + *(dxm-1) - starts[j+1];
5913 }
5914 /* Skip component slot if necessary */
5915 if (mat->stencil.noc) dxm++;
5916 /* Local row number */
5917 if (tmp >= 0) {
5918 jdxm[numNewRows++] = tmp;
5919 }
5920 }
5921 ierr = MatZeroRowsLocal(mat,numNewRows,jdxm,diag,x,b);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5921,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5922 ierr = PetscFree(jdxm)((*PetscTrFree)((void*)(jdxm),5922,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((jdxm) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),5922,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
5923 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
5924}
5925
5926/*@
5927 MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
5928 of a set of rows and columns of a matrix.
5929
5930 Collective on Mat
5931
5932 Input Parameters:
5933+ mat - the matrix
5934. numRows - the number of rows/columns to remove
5935. rows - the grid coordinates (and component number when dof > 1) for matrix rows
5936. diag - value put in all diagonals of eliminated rows (0.0 will even eliminate the diagonal entry)
5937. x - optional vector of solutions for zeroed rows (other entries in the vector are not used)
5938- b - optional right-hand-side vector that will be adjusted by the provided solution
5939
5940 Notes:
5941 For the AIJ and BAIJ matrix formats this removes the old nonzero structure,
5942 but does not release memory. For the dense and block diagonal
5943 formats this does not alter the nonzero structure.
5944
5945 If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) has been set, the nonzero structure
5946 of the matrix is not changed (even for AIJ and BAIJ matrices); the values are
5947 merely zeroed.
5948
5949 The user can set a value in the diagonal entry (or, for the AIJ and
5950 row formats, can optionally remove the main diagonal entry from the
5951 nonzero structure as well by passing 0.0 as the diag argument).
5952
5953 For the parallel case, all processes that share the matrix (i.e.,
5954 those in the communicator used for matrix creation) MUST call this
5955 routine, regardless of whether any rows being zeroed are owned by
5956 them.
5957
5958 Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
5959 list only rows local to itself, but the row/column numbers are given in local numbering).
5960
5961 The grid coordinates are across the entire grid, not just the local portion
5962
5963 In Fortran idxm should be declared as
5964$ MatStencil idxm(4,m)
5965 and the values inserted using
5966$ idxm(MatStencil_i,1) = i
5967$ idxm(MatStencil_j,1) = j
5968$ idxm(MatStencil_k,1) = k
5969$ idxm(MatStencil_c,1) = c
5970 etc
5971
5972 For periodic boundary conditions use negative indices for values to the left (below 0), which are
5973 obtained by wrapping around from the right edge. For values to the right of the last entry, use that index plus one,
5974 etc., to obtain values wrapped around from the left edge. This works only with the
5975 DM_BOUNDARY_PERIODIC boundary type.
5976
5977 For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
5978 a single value per point) you can skip filling those indices.
5979
5980 Level: intermediate
5981
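 Example of Usage (a sketch for a 3d grid with several components per point, eliminating the row and column for component 0 at grid point (i,j,k); names are illustrative):
.vb
 MatStencil row = {0};
 row.i = i; row.j = j; row.k = k; row.c = 0;
 MatZeroRowsColumnsStencil(A,1,&row,1.0,x,b);
.ve
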
5982.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
5983 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRows()
5984@*/
5985PetscErrorCode MatZeroRowsColumnsStencil(Mat mat,PetscInt numRows,const MatStencil rows[],PetscScalar diag,Vec x,Vec b)
5986{
5987 PetscInt dim = mat->stencil.dim;
5988 PetscInt sdim = dim - (1 - (PetscInt) mat->stencil.noc);
5989 PetscInt *dims = mat->stencil.dims+1;
5990 PetscInt *starts = mat->stencil.starts;
5991 PetscInt *dxm = (PetscInt*) rows;
5992 PetscInt *jdxm, i, j, tmp, numNewRows = 0;
5993 PetscErrorCode ierr;
5994
5995 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 5995; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
5996 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),5996,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),5996,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),5996,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),5996,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
5997 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),5997,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
5998 if (numRows) PetscValidIntPointer(rows,3)do { if (!rows) return PetscError(((MPI_Comm)0x44000001),5998
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(rows,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),5998,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscInt: Parameter # %d"
,3); } while (0)
;
5999
6000 ierr = PetscMalloc1(numRows, &jdxm)PetscMallocA(1,PETSC_FALSE,6000,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,(size_t)(numRows)*sizeof(**(&jdxm)),(&jdxm))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),6000,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
6001 for (i = 0; i < numRows; ++i) {
6002 /* Skip unused dimensions (they are ordered k, j, i, c) */
6003 for (j = 0; j < 3-sdim; ++j) dxm++;
6004 /* Local index in X dir */
6005 tmp = *dxm++ - starts[0];
6006 /* Loop over remaining dimensions */
6007 for (j = 0; j < dim-1; ++j) {
6008 /* If nonlocal, set index to be negative */
6009 if ((*dxm++ - starts[j+1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT(-2147483647 - 1);
6010 /* Update local index */
6011 else tmp = tmp*dims[j] + *(dxm-1) - starts[j+1];
6012 }
6013 /* Skip component slot if necessary */
6014 if (mat->stencil.noc) dxm++;
6015 /* Local row number */
6016 if (tmp >= 0) {
6017 jdxm[numNewRows++] = tmp;
6018 }
6019 }
6020 ierr = MatZeroRowsColumnsLocal(mat,numNewRows,jdxm,diag,x,b);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),6020,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
6021 ierr = PetscFree(jdxm)((*PetscTrFree)((void*)(jdxm),6021,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((jdxm) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),6021,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
6022 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
6023}
6024
6025/*@C
6026 MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6027 of a set of rows of a matrix; using local numbering of rows.
6028
6029 Collective on Mat
6030
6031 Input Parameters:
6032+ mat - the matrix
6033. numRows - the number of rows to remove
6034. rows - the local row indices
6035. diag - value put in all diagonals of eliminated rows
6036. x - optional vector of solutions for zeroed rows (other entries in the vector are not used)
6037- b - optional right-hand-side vector that will be adjusted by the provided solution
6038
6039 Notes:
6040 Before calling MatZeroRowsLocal(), the user must first set the
6041 local-to-global mapping by calling MatSetLocalToGlobalMapping().
6042
6043 For the AIJ matrix formats this removes the old nonzero structure,
6044 but does not release memory. For the dense and block diagonal
6045 formats this does not alter the nonzero structure.
6046
6047 If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) has been set, the nonzero structure
6048 of the matrix is not changed (even for AIJ and BAIJ matrices); the values are
6049 merely zeroed.
6050
6051 The user can set a value in the diagonal entry (or, for the AIJ and
6052 row formats, can optionally remove the main diagonal entry from the
6053 nonzero structure as well by passing 0.0 as the diag argument).
6054
6055 You can call MatSetOption(mat,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) if each process indicates only rows it
6056 owns that are to be zeroed. This saves a global synchronization in the implementation.
6057
6058 Level: intermediate
6059
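 Example of Usage (a sketch; assumes a local-to-global mapping ltog was attached earlier with MatSetLocalToGlobalMapping(); names are illustrative):
.vb
 MatSetLocalToGlobalMapping(A,ltog,ltog);
 ...
 PetscInt lrows[] = {0,1};   /* local row numbers */
 MatZeroRowsLocal(A,2,lrows,1.0,x,b);
.ve
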
6060.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRows(), MatSetOption(),
6061 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
6062@*/
6063PetscErrorCode MatZeroRowsLocal(Mat mat,PetscInt numRows,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
6064{
6065 PetscErrorCode ierr;
6066
6067 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 6067; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
6068 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),6068,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),6068,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),6068,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),6068,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
6069  PetscValidType(mat,1);
6070  if (numRows) PetscValidIntPointer(rows,3);
6071  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6072  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6073  MatCheckPreallocated(mat,1);
6074
6075  if (mat->ops->zerorowslocal) {
6076    ierr = (*mat->ops->zerorowslocal)(mat,numRows,rows,diag,x,b);CHKERRQ(ierr);
6077  } else {
6078    IS             is, newis;
6079    const PetscInt *newRows;
6080
6081    if (!mat->rmap->mapping) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Need to provide local to global mapping to matrix first");
6082    ierr = ISCreateGeneral(PETSC_COMM_SELF,numRows,rows,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
6083    ierr = ISLocalToGlobalMappingApplyIS(mat->rmap->mapping,is,&newis);CHKERRQ(ierr);
6084    ierr = ISGetIndices(newis,&newRows);CHKERRQ(ierr);
6085    ierr = (*mat->ops->zerorows)(mat,numRows,newRows,diag,x,b);CHKERRQ(ierr);
6086    ierr = ISRestoreIndices(newis,&newRows);CHKERRQ(ierr);
6087    ierr = ISDestroy(&newis);CHKERRQ(ierr);
6088    ierr = ISDestroy(&is);CHKERRQ(ierr);
6089  }
6090  ierr = PetscObjectStateIncrease((PetscObject)mat);CHKERRQ(ierr);
6091  PetscFunctionReturn(0);
6092}
6093
6094/*@
6095 MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6096 of a set of rows of a matrix, using the local numbering of the rows.
6097
6098 Collective on Mat
6099
6100 Input Parameters:
6101+ mat - the matrix
6102. is - index set of rows to remove
6103. diag - value put in all diagonals of eliminated rows
6104. x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6105- b - optional vector of the right hand side, which will be adjusted by the provided solution
6106
6107 Notes:
6108 Before calling MatZeroRowsLocalIS(), the user must first set the
6109 local-to-global mapping by calling MatSetLocalToGlobalMapping().
6110
6111 For the AIJ matrix formats this removes the old nonzero structure,
6112 but does not release memory. For the dense and block diagonal
6113 formats this does not alter the nonzero structure.
6114
6115 If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) has been set, the nonzero structure
6116 of the matrix is not changed (even for AIJ and BAIJ matrices); the values are
6117 merely zeroed.
6118
6119 The user can set a value in the diagonal entry (or for the AIJ and
6120 row formats can optionally remove the main diagonal entry from the
6121 nonzero structure as well, by passing 0.0 as the diag argument).
6122
6123 You can call MatSetOption(mat,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) if each process indicates only rows it
6124 owns that are to be zeroed. This saves a global synchronization in the implementation.
6125
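 Example of Usage:
 A minimal sketch (error checking elided; A, ltog, and the local indices in idx are assumed to be a previously created Mat, ISLocalToGlobalMapping, and index array, and are illustrative):
.vb
   PetscInt idx[] = {0,1};                            /* local rows to zero */
   MatSetLocalToGlobalMapping(A,ltog,ltog);           /* must precede this call */
   ISCreateGeneral(PETSC_COMM_SELF,2,idx,PETSC_COPY_VALUES,&zeroed);
   MatZeroRowsLocalIS(A,zeroed,1.0,NULL,NULL);        /* keep 1.0 on each zeroed diagonal */
   ISDestroy(&zeroed);
.ve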
6126 Level: intermediate
6127
6128.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRows(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
6129 MatZeroRowsColumnsLocal(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
6130@*/
6131PetscErrorCode MatZeroRowsLocalIS(Mat mat,IS is,PetscScalar diag,Vec x,Vec b)
6132{
6133 PetscErrorCode ierr;
6134 PetscInt numRows;
6135 const PetscInt *rows;
6136
6137  PetscFunctionBegin;
6138  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6139  PetscValidType(mat,1);
6140  PetscValidHeaderSpecific(is,IS_CLASSID,2);
6141  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6142  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6143  MatCheckPreallocated(mat,1);
6144
6145  ierr = ISGetLocalSize(is,&numRows);CHKERRQ(ierr);
6146  ierr = ISGetIndices(is,&rows);CHKERRQ(ierr);
6147  ierr = MatZeroRowsLocal(mat,numRows,rows,diag,x,b);CHKERRQ(ierr);
6148  ierr = ISRestoreIndices(is,&rows);CHKERRQ(ierr);
6149  PetscFunctionReturn(0);
6150}
6151
6152/*@
6153 MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6154 of a set of rows and columns of a matrix, using the local numbering of the rows.
6155
6156 Collective on Mat
6157
6158 Input Parameters:
6159+ mat - the matrix
6160. numRows - the number of rows to remove
6161. rows - the local row indices
6162. diag - value put in all diagonals of eliminated rows
6163. x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6164- b - optional vector of the right hand side, which will be adjusted by the provided solution
6165
6166 Notes:
6167 Before calling MatZeroRowsColumnsLocal(), the user must first set the
6168 local-to-global mapping by calling MatSetLocalToGlobalMapping().
6169
6170 The user can set a value in the diagonal entry (or for the AIJ and
6171 row formats can optionally remove the main diagonal entry from the
6172 nonzero structure as well, by passing 0.0 as the diag argument).
6173
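 Example of Usage:
 A minimal sketch (error checking elided; the local indices are illustrative, and x and b are assumed to hold the boundary solution and the right hand side):
.vb
   PetscInt rows[] = {3,7};                     /* local indices of constrained rows */
   MatZeroRowsColumnsLocal(A,2,rows,1.0,x,b);   /* also adjusts b using the values in x */
.ve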
6174 Level: intermediate
6175
6176.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
6177 MatZeroRows(), MatZeroRowsColumnsLocalIS(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
6178@*/
6179PetscErrorCode MatZeroRowsColumnsLocal(Mat mat,PetscInt numRows,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
6180{
6181 PetscErrorCode ierr;
6182 IS is, newis;
6183 const PetscInt *newRows;
6184
6185  PetscFunctionBegin;
6186  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6187  PetscValidType(mat,1);
6188  if (numRows) PetscValidIntPointer(rows,3);
6189  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6190  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6191  MatCheckPreallocated(mat,1);
6192
6193  if (!mat->cmap->mapping) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Need to provide local to global mapping to matrix first");
6194  ierr = ISCreateGeneral(PETSC_COMM_SELF,numRows,rows,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
6195  ierr = ISLocalToGlobalMappingApplyIS(mat->cmap->mapping,is,&newis);CHKERRQ(ierr);
6196  ierr = ISGetIndices(newis,&newRows);CHKERRQ(ierr);
6197  ierr = (*mat->ops->zerorowscolumns)(mat,numRows,newRows,diag,x,b);CHKERRQ(ierr);
6198  ierr = ISRestoreIndices(newis,&newRows);CHKERRQ(ierr);
6199  ierr = ISDestroy(&newis);CHKERRQ(ierr);
6200  ierr = ISDestroy(&is);CHKERRQ(ierr);
6201  ierr = PetscObjectStateIncrease((PetscObject)mat);CHKERRQ(ierr);
6202  PetscFunctionReturn(0);
6203}
6204
6205/*@
6206 MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6207 of a set of rows and columns of a matrix, using the local numbering of the rows.
6208
6209 Collective on Mat
6210
6211 Input Parameters:
6212+ mat - the matrix
6213. is - index set of rows to remove
6214. diag - value put in all diagonals of eliminated rows
6215. x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6216- b - optional vector of the right hand side, which will be adjusted by the provided solution
6217
6218 Notes:
6219 Before calling MatZeroRowsColumnsLocalIS(), the user must first set the
6220 local-to-global mapping by calling MatSetLocalToGlobalMapping().
6221
6222 The user can set a value in the diagonal entry (or for the AIJ and
6223 row formats can optionally remove the main diagonal entry from the
6224 nonzero structure as well, by passing 0.0 as the diag argument).
6225
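 Example of Usage:
 A minimal sketch (bc is assumed to be a previously built IS of local indices, e.g. created with ISCreateGeneral()):
.vb
   MatZeroRowsColumnsLocalIS(A,bc,1.0,NULL,NULL);
   ISDestroy(&bc);
.ve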
6226 Level: intermediate
6227
6228.seealso: MatZeroRowsIS(), MatZeroRowsColumns(), MatZeroRowsLocalIS(), MatZeroRowsStencil(), MatZeroEntries(), MatZeroRowsLocal(), MatSetOption(),
6229 MatZeroRowsColumnsLocal(), MatZeroRows(), MatZeroRowsColumnsIS(), MatZeroRowsColumnsStencil()
6230@*/
6231PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat,IS is,PetscScalar diag,Vec x,Vec b)
6232{
6233 PetscErrorCode ierr;
6234 PetscInt numRows;
6235 const PetscInt *rows;
6236
6237  PetscFunctionBegin;
6238  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6239  PetscValidType(mat,1);
6240  PetscValidHeaderSpecific(is,IS_CLASSID,2);
6241  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6242  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6243  MatCheckPreallocated(mat,1);
6244
6245  ierr = ISGetLocalSize(is,&numRows);CHKERRQ(ierr);
6246  ierr = ISGetIndices(is,&rows);CHKERRQ(ierr);
6247  ierr = MatZeroRowsColumnsLocal(mat,numRows,rows,diag,x,b);CHKERRQ(ierr);
6248  ierr = ISRestoreIndices(is,&rows);CHKERRQ(ierr);
6249  PetscFunctionReturn(0);
6250}
6251
6252/*@C
6253 MatGetSize - Returns the numbers of rows and columns in a matrix.
6254
6255 Not Collective
6256
6257 Input Parameter:
6258. mat - the matrix
6259
6260 Output Parameters:
6261+ m - the number of global rows
6262- n - the number of global columns
6263
6264 Note: both output parameters can be NULL on input.
6265
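 Example of Usage:
 A minimal sketch (A is an assumed, previously created matrix):
.vb
   PetscInt M,N;
   MatGetSize(A,&M,&N);     /* global number of rows and columns */
   MatGetSize(A,&M,NULL);   /* only the global number of rows */
.ve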
6266 Level: beginner
6267
6268.seealso: MatGetLocalSize()
6269@*/
6270PetscErrorCode MatGetSize(Mat mat,PetscInt *m,PetscInt *n)
6271{
6272  PetscFunctionBegin;
6273  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6274  if (m) *m = mat->rmap->N;
6275  if (n) *n = mat->cmap->N;
6276  PetscFunctionReturn(0);
6277}
6278
6279/*@C
6280 MatGetLocalSize - Returns the number of rows and columns of a matrix
6281 stored locally. This information may be implementation-dependent, so
6282 use with care.
6283
6284 Not Collective
6285
6286 Input Parameter:
6287. mat - the matrix
6288
6289 Output Parameters:
6290+ m - the number of local rows
6291- n - the number of local columns
6292
6293 Note: both output parameters can be NULL on input.
6294
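 Example of Usage:
 A minimal sketch (A is an assumed, previously created matrix); m matches the local size of a vector y in y = A*x, and n matches that of x:
.vb
   PetscInt m,n;
   MatGetLocalSize(A,&m,&n);
.ve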
6295 Level: beginner
6296
6297.seealso: MatGetSize()
6298@*/
6299PetscErrorCode MatGetLocalSize(Mat mat,PetscInt *m,PetscInt *n)
6300{
6301  PetscFunctionBegin;
6302  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6303  if (m) PetscValidIntPointer(m,2);
6304  if (n) PetscValidIntPointer(n,3);
6305  if (m) *m = mat->rmap->n;
6306  if (n) *n = mat->cmap->n;
6307  PetscFunctionReturn(0);
6308}
6309
6310/*@C
6311 MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with the rows of a vector one multiplies by that are owned by
6312 this process (the columns of the "diagonal block").
6313
6314 Not Collective, unless the matrix has not been allocated, then collective on Mat
6315
6316 Input Parameter:
6317. mat - the matrix
6318
6319 Output Parameters:
6320+ m - the global index of the first local column
6321- n - one more than the global index of the last local column
6322
6323 Notes:
6324 both output parameters can be NULL on input.
6325
6326 Level: developer
6327
6328.seealso: MatGetOwnershipRange(), MatGetOwnershipRanges(), MatGetOwnershipRangesColumn()
6329
6330@*/
6331PetscErrorCode MatGetOwnershipRangeColumn(Mat mat,PetscInt *m,PetscInt *n)
6332{
6333  PetscFunctionBegin;
6334  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6335  PetscValidType(mat,1);
6336  if (m) PetscValidIntPointer(m,2);
6337  if (n) PetscValidIntPointer(n,3);
6338  MatCheckPreallocated(mat,1);
6339  if (m) *m = mat->cmap->rstart;
6340  if (n) *n = mat->cmap->rend;
6341  PetscFunctionReturn(0);
6342}
6343
6344/*@C
6345 MatGetOwnershipRange - Returns the range of matrix rows owned by
6346 this processor, assuming that the matrix is laid out with the first
6347 n1 rows on the first processor, the next n2 rows on the second, etc.
6348 For certain parallel layouts this range may not be well defined.
6349
6350 Not Collective
6351
6352 Input Parameter:
6353. mat - the matrix
6354
6355 Output Parameters:
6356+ m - the global index of the first local row
6357- n - one more than the global index of the last local row
6358
6359 Note: Both output parameters can be NULL on input.
6360$ This function requires that the matrix be preallocated. If you have not preallocated, consider using
6361$ PetscSplitOwnership(MPI_Comm comm, PetscInt *n, PetscInt *N)
6362$ and then MPI_Scan() to calculate prefix sums of the local sizes.
6363
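 Example of Usage:
 A minimal sketch of looping over the locally owned rows (error checking elided; A is an assumed, previously created matrix):
.vb
   PetscInt rstart,rend,i;
   MatGetOwnershipRange(A,&rstart,&rend);
   for (i=rstart; i<rend; i++) {
     /* assemble the locally owned row i, e.g. with MatSetValues() */
   }
.ve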
6364 Level: beginner
6365
6366.seealso: MatGetOwnershipRanges(), MatGetOwnershipRangeColumn(), MatGetOwnershipRangesColumn(), PetscSplitOwnership(), PetscSplitOwnershipBlock()
6367
6368@*/
6369PetscErrorCode MatGetOwnershipRange(Mat mat,PetscInt *m,PetscInt *n)
6370{
6371  PetscFunctionBegin;
6372  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6373  PetscValidType(mat,1);
6374  if (m) PetscValidIntPointer(m,2);
6375  if (n) PetscValidIntPointer(n,3);
6376  MatCheckPreallocated(mat,1);
6377  if (m) *m = mat->rmap->rstart;
6378  if (n) *n = mat->rmap->rend;
6379  PetscFunctionReturn(0);
6380}
6381
6382/*@C
6383 MatGetOwnershipRanges - Returns the range of matrix rows owned by
6384 each process
6385
6386 Not Collective, unless matrix has not been allocated, then collective on Mat
6387
6388 Input Parameter:
6389. mat - the matrix
6390
6391 Output Parameter:
6392. ranges - the start of each process's portion, plus one final entry equal to the total number of rows
6393
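 Example of Usage:
 A minimal sketch that finds which process owns a given global row (error checking elided; row is an assumed global row index):
.vb
   const PetscInt *ranges;
   PetscMPIInt    size,p;
   MatGetOwnershipRanges(A,&ranges);
   MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
   for (p=0; p<size; p++) {
     if (row >= ranges[p] && row < ranges[p+1]) break;  /* process p owns the row */
   }
.ve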
6394 Level: beginner
6395
6396.seealso: MatGetOwnershipRange(), MatGetOwnershipRangeColumn(), MatGetOwnershipRangesColumn()
6397
6398@*/
6399PetscErrorCode MatGetOwnershipRanges(Mat mat,const PetscInt **ranges)
6400{
6401 PetscErrorCode ierr;
6402
6403  PetscFunctionBegin;
6404  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6405  PetscValidType(mat,1);
6406  MatCheckPreallocated(mat,1);
6407  ierr = PetscLayoutGetRanges(mat->rmap,ranges);CHKERRQ(ierr);
6408  PetscFunctionReturn(0);
6409}
6410
6411/*@C
6412 MatGetOwnershipRangesColumn - Returns the range of matrix columns associated with the rows of a vector one multiplies by that are owned by
6413 each process (the columns of the "diagonal blocks" for each process).
6414
6415 Not Collective, unless the matrix has not been allocated, then collective on Mat
6416
6417 Input Parameter:
6418. mat - the matrix
6419
6420 Output Parameter:
6421. ranges - the start of each process's portion, plus one final entry equal to the total number of columns
6422
6423 Level: beginner
6424
6425.seealso: MatGetOwnershipRange(), MatGetOwnershipRangeColumn(), MatGetOwnershipRanges()
6426
6427@*/
6428PetscErrorCode MatGetOwnershipRangesColumn(Mat mat,const PetscInt **ranges)
6429{
6430 PetscErrorCode ierr;
6431
6432  PetscFunctionBegin;
6433  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6434  PetscValidType(mat,1);
6435  MatCheckPreallocated(mat,1);
6436  ierr = PetscLayoutGetRanges(mat->cmap,ranges);CHKERRQ(ierr);
6437  PetscFunctionReturn(0);
6438}
6439
6440/*@C
6441 MatGetOwnershipIS - Get row and column ownership as index sets
6442
6443 Not Collective
6444
6445 Input Argument:
6446. A - the matrix (MATELEMENTAL provides a specialized implementation; other types fall back to a standard row-based partition)
6447
6448 Output Arguments:
6449+ rows - rows in which this process owns elements
6450- cols - columns in which this process owns elements
6451
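 Example of Usage:
 A minimal sketch (error checking elided; the caller is responsible for destroying the returned index sets):
.vb
   IS rows,cols;
   MatGetOwnershipIS(A,&rows,&cols);
   /* ... query the locally owned rows/columns ... */
   ISDestroy(&rows);
   ISDestroy(&cols);
.ve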
6452 Level: intermediate
6453
6454.seealso: MatGetOwnershipRange(), MatGetOwnershipRangeColumn(), MatSetValues(), MATELEMENTAL
6455@*/
6456PetscErrorCode MatGetOwnershipIS(Mat A,IS *rows,IS *cols)
6457{
6458 PetscErrorCode ierr,(*f)(Mat,IS*,IS*);
6459
6460  PetscFunctionBegin;
6461  MatCheckPreallocated(A,1);
6462  ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetOwnershipIS_C",&f);CHKERRQ(ierr);
6463  if (f) {
6464    ierr = (*f)(A,rows,cols);CHKERRQ(ierr);
6465  } else {   /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
6466    if (rows) {ierr = ISCreateStride(PETSC_COMM_SELF,A->rmap->n,A->rmap->rstart,1,rows);CHKERRQ(ierr);}
6467    if (cols) {ierr = ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,cols);CHKERRQ(ierr);}
6468  }
6469  PetscFunctionReturn(0);
6470}
6471
6472/*@C
6473 MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix.
6474 Uses levels of fill only, not drop tolerance. Use MatLUFactorNumeric()
6475 to complete the factorization.
6476
6477 Collective on Mat
6478
6479 Input Parameters:
6480+ mat - the matrix
6481. row - row permutation
6482. column - column permutation
6483- info - structure containing
6484$ levels - number of levels of fill.
6485$ expected fill - as ratio of original fill.
6486$ 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
6487 missing diagonal entries)
6488
6489 Output Parameter:
6490. fact - new matrix that has been symbolically factored
6491
6492 Notes:
6493 See Users-Manual: ch_mat for additional information about choosing the fill factor for better efficiency.
6494
6495 Most users should employ the simplified KSP interface for linear solvers
6496 instead of working directly with matrix algebra routines such as this.
6497 See, e.g., KSPCreate().
6498
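 Example of Usage:
 A minimal sketch of the full symbolic/numeric ILU sequence (error checking elided; F, row, col, and info are assumed, previously declared variables, and the ordering, levels, and fill values are illustrative):
.vb
   MatGetOrdering(A,MATORDERINGNATURAL,&row,&col);
   MatGetFactor(A,MATSOLVERPETSC,MAT_FACTOR_ILU,&F);
   MatFactorInfoInitialize(&info);
   info.levels = 1;      /* one level of fill */
   info.fill   = 2.0;    /* expected fill ratio */
   MatILUFactorSymbolic(F,A,row,col,&info);
   MatLUFactorNumeric(F,A,&info);
.ve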
6499 Level: developer
6500
6501.seealso: MatLUFactorSymbolic(), MatLUFactorNumeric(), MatCholeskyFactor(),
6502 MatGetOrdering(), MatFactorInfo
6503
6504 Note: this uses the definition of level of fill as in Y. Saad, 2003
6505
6506 Developer Note: the Fortran interface is not autogenerated as the F90
6507 interface definition cannot be generated correctly [due to MatFactorInfo]
6508
6509 References:
6510 Y. Saad, Iterative Methods for Sparse Linear Systems. Philadelphia: Society for Industrial and Applied Mathematics, 2003.
6511@*/
6512PetscErrorCode MatILUFactorSymbolic(Mat fact,Mat mat,IS row,IS col,const MatFactorInfo *info)
6513{
6514 PetscErrorCode ierr;
6515
6516  PetscFunctionBegin;
6517  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6518  PetscValidType(mat,1);
6519  PetscValidHeaderSpecific(row,IS_CLASSID,2);
6520  PetscValidHeaderSpecific(col,IS_CLASSID,3);
6521  PetscValidPointer(info,4);
6522  PetscValidPointer(fact,5);
6523  if (info->levels < 0) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Levels of fill negative %D",(PetscInt)info->levels);
6524  if (info->fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Expected fill less than 1.0 %g",(double)info->fill);
6525  if (!(fact)->ops->ilufactorsymbolic) {
6526    MatSolverType spackage;
6527    ierr = MatFactorGetSolverType(fact,&spackage);CHKERRQ(ierr);
6528    SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s symbolic ILU using solver package %s",((PetscObject)mat)->type_name,spackage);
6529  }
6530  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6531  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6532  MatCheckPreallocated(mat,2);
6533
6534  ierr = PetscLogEventBegin(MAT_ILUFactorSymbolic,mat,row,col,0);CHKERRQ(ierr);
6535  ierr = (fact->ops->ilufactorsymbolic)(fact,mat,row,col,info);CHKERRQ(ierr);
6536  ierr = PetscLogEventEnd(MAT_ILUFactorSymbolic,mat,row,col,0);CHKERRQ(ierr);
6537  PetscFunctionReturn(0);
6538}
6539
6540/*@C
6541 MatICCFactorSymbolic - Performs symbolic incomplete
6542 Cholesky factorization for a symmetric matrix. Use
6543 MatCholeskyFactorNumeric() to complete the factorization.
6544
6545 Collective on Mat
6546
6547 Input Parameters:
6548+ mat - the matrix
6549. perm - row and column permutation
6550- info - structure containing
6551$ levels - number of levels of fill.
6552$ expected fill - as ratio of original fill.
6553
6554 Output Parameter:
6555. fact - the factored matrix
6556
6557 Notes:
6558 Most users should employ the KSP interface for linear solvers
6559 instead of working directly with matrix algebra routines such as this.
6560 See, e.g., KSPCreate().
6561
6562 Level: developer
6563
6564.seealso: MatCholeskyFactorNumeric(), MatCholeskyFactor(), MatFactorInfo
6565
6566 Note: this uses the definition of level of fill as in Y. Saad, 2003
6567
6568 Developer Note: fortran interface is not autogenerated as the f90
6569 interface definition cannot be generated correctly [due to MatFactorInfo]
6570
6571 References:
6572 Y. Saad, Iterative Methods for Sparse Linear Systems. Philadelphia: Society for Industrial and Applied Mathematics, 2003.
6573@*/
6574PetscErrorCode MatICCFactorSymbolic(Mat fact,Mat mat,IS perm,const MatFactorInfo *info)
6575{
6576 PetscErrorCode ierr;
6577
6578  PetscFunctionBegin;
6579  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6580  PetscValidType(mat,1);
6581  PetscValidHeaderSpecific(perm,IS_CLASSID,2);
6582  PetscValidPointer(info,3);
6583  PetscValidPointer(fact,4);
6584  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6585  if (info->levels < 0) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Levels negative %D",(PetscInt) info->levels);
6586  if (info->fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Expected fill greater than or equal to 1.0, got %g",(double)info->fill);
6587  if (!(fact)->ops->iccfactorsymbolic) {
6588    MatSolverType spackage;
6589    ierr = MatFactorGetSolverType(fact,&spackage);CHKERRQ(ierr);
6590    SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s symbolic ICC using solver package %s",((PetscObject)mat)->type_name,spackage);
6591  }
6592  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6593  MatCheckPreallocated(mat,2);
6594
6595  ierr = PetscLogEventBegin(MAT_ICCFactorSymbolic,mat,perm,0,0);CHKERRQ(ierr);
6596  ierr = (fact->ops->iccfactorsymbolic)(fact,mat,perm,info);CHKERRQ(ierr);
6597  ierr = PetscLogEventEnd(MAT_ICCFactorSymbolic,mat,perm,0,0);CHKERRQ(ierr);
6598  PetscFunctionReturn(0);
6599}
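/* A minimal usage sketch for MatICCFactorSymbolic() (not from this source
   file): it sits between MatGetFactor() and MatCholeskyFactorNumeric().
   Assumes A is an assembled symmetric Mat; the ordering and levels chosen
   here are illustrative only.

     Mat            F;
     IS             perm,iperm;
     MatFactorInfo  info;

     ierr = MatGetOrdering(A,MATORDERINGNATURAL,&perm,&iperm);CHKERRQ(ierr);
     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
     info.levels = 1;    // ICC(1)
     info.fill   = 1.0;  // expected fill ratio; must be >= 1.0 (checked above)
     ierr = MatGetFactor(A,MATSOLVERPETSC,MAT_FACTOR_ICC,&F);CHKERRQ(ierr);
     ierr = MatICCFactorSymbolic(F,A,perm,&info);CHKERRQ(ierr);
     ierr = MatCholeskyFactorNumeric(F,A,&info);CHKERRQ(ierr);
*/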
6600
6601/*@C
6602 MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
6603 points to an array of valid matrices, they may be reused to store the new
6604 submatrices.
6605
6606 Collective on Mat
6607
6608 Input Parameters:
6609+ mat - the matrix
6610. n - the number of submatrices to be extracted (on this processor, may be zero)
6611. irow, icol - index sets of rows and columns to extract
6612- scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
6613
6614 Output Parameter:
6615. submat - the array of submatrices
6616
6617 Notes:
6618 MatCreateSubMatrices() can extract ONLY sequential submatrices
6619 (from both sequential and parallel matrices). Use MatCreateSubMatrix()
6620 to extract a parallel submatrix.
6621
6622 Some matrix types place restrictions on the row and column indices, such as
6623 requiring that they be sorted or that the row and column index sets be identical.
6624
6625 The index sets must not contain duplicate entries.
6626
6627 When extracting submatrices from a parallel matrix, each processor can
6628 form a different submatrix by setting the rows and columns of its
6629 individual index sets according to the local submatrix desired.
6630
6631 When finished using the submatrices, the user should destroy
6632 them with MatDestroySubMatrices().
6633
6634 MAT_REUSE_MATRIX can only be used when the nonzero structure of the
6635 original matrix has not changed since the last call to MatCreateSubMatrices().
6636
6637 This routine creates the matrices in submat; you should NOT create them before
6638 calling it. It also allocates the array of matrix pointers submat.
6639
6640 For BAIJ matrices the index sets must respect the block structure, that is if they
6641 request one row/column in a block, they must request all rows/columns that are in
6642 that block. For example, if the block size is 2 you cannot request just row 0 and
6643 column 0.
6644
6645 Fortran Note:
6646 The Fortran interface is slightly different from that given below; it
6647 requires one to pass in as submat a Mat (integer) array of size at least n+1.
6648
6649 Level: advanced
6650
6651
6652.seealso: MatDestroySubMatrices(), MatCreateSubMatrix(), MatGetRow(), MatGetDiagonal(), MatReuse
6653@*/
6654PetscErrorCode MatCreateSubMatrices(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
6655{
6656 PetscErrorCode ierr;
6657 PetscInt i;
6658 PetscBool eq;
6659
6660  PetscFunctionBegin;
6661  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6662  PetscValidType(mat,1);
6663  if (n) {
6664    PetscValidPointer(irow,3);
6665    PetscValidHeaderSpecific(*irow,IS_CLASSID,3);
6666    PetscValidPointer(icol,4);
6667    PetscValidHeaderSpecific(*icol,IS_CLASSID,4);
6668  }
6669  PetscValidPointer(submat,6);
6670  if (n && scall == MAT_REUSE_MATRIX) {
6671    PetscValidPointer(*submat,6);
6672    PetscValidHeaderSpecific(**submat,MAT_CLASSID,6);
6673  }
6674  if (!mat->ops->createsubmatrices) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
6675  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6676  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6677  MatCheckPreallocated(mat,1);
6678
6679  ierr = PetscLogEventBegin(MAT_CreateSubMats,mat,0,0,0);CHKERRQ(ierr);
6680  ierr = (*mat->ops->createsubmatrices)(mat,n,irow,icol,scall,submat);CHKERRQ(ierr);
6681  ierr = PetscLogEventEnd(MAT_CreateSubMats,mat,0,0,0);CHKERRQ(ierr);
6682  for (i=0; i<n; i++) {
6683    (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
6684    if (mat->symmetric || mat->structurally_symmetric || mat->hermitian) {
6685      ierr = ISEqual(irow[i],icol[i],&eq);CHKERRQ(ierr);
6686      if (eq) {
6687        if (mat->symmetric) {
6688          ierr = MatSetOption((*submat)[i],MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
6689        } else if (mat->hermitian) {
6690          ierr = MatSetOption((*submat)[i],MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
6691        } else if (mat->structurally_symmetric) {
6692          ierr = MatSetOption((*submat)[i],MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
6693        }
6694      }
6695    }
6696  }
6697  PetscFunctionReturn(0);
6698}
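/* A minimal sketch of extracting one sequential submatrix per process with
   MatCreateSubMatrices() (not from this source file). Assumes A is an
   assembled Mat; the 10x10 stride ranges are illustrative.

     Mat *submats;
     IS  rows,cols;

     ierr = ISCreateStride(PETSC_COMM_SELF,10,0,1,&rows);CHKERRQ(ierr);  // rows 0..9
     ierr = ISCreateStride(PETSC_COMM_SELF,10,0,1,&cols);CHKERRQ(ierr);  // cols 0..9
     ierr = MatCreateSubMatrices(A,1,&rows,&cols,MAT_INITIAL_MATRIX,&submats);CHKERRQ(ierr);
     // ... use submats[0], a sequential matrix ...
     ierr = MatDestroySubMatrices(1,&submats);CHKERRQ(ierr);  // frees both matrices and array
     ierr = ISDestroy(&rows);CHKERRQ(ierr);
     ierr = ISDestroy(&cols);CHKERRQ(ierr);
*/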
6699
6700/*@C
6701 MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub-communicator of mat (by pairs of IS that may live on sub-communicators).
6702
6703 Collective on Mat
6704
6705 Input Parameters:
6706+ mat - the matrix
6707. n - the number of submatrices to be extracted
6708. irow, icol - index sets of rows and columns to extract
6709- scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
6710
6711 Output Parameter:
6712. submat - the array of submatrices
6713
6714 Level: advanced
6715
6716
6717.seealso: MatCreateSubMatrices(), MatCreateSubMatrix(), MatGetRow(), MatGetDiagonal(), MatReuse
6718@*/
6719PetscErrorCode MatCreateSubMatricesMPI(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
6720{
6721 PetscErrorCode ierr;
6722 PetscInt i;
6723 PetscBool eq;
6724
6725  PetscFunctionBegin;
6726  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6727  PetscValidType(mat,1);
6728  if (n) {
6729    PetscValidPointer(irow,3);
6730    PetscValidHeaderSpecific(*irow,IS_CLASSID,3);
6731    PetscValidPointer(icol,4);
6732    PetscValidHeaderSpecific(*icol,IS_CLASSID,4);
6733  }
6734  PetscValidPointer(submat,6);
6735  if (n && scall == MAT_REUSE_MATRIX) {
6736    PetscValidPointer(*submat,6);
6737    PetscValidHeaderSpecific(**submat,MAT_CLASSID,6);
6738  }
6739  if (!mat->ops->createsubmatricesmpi) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
6740  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6741  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6742  MatCheckPreallocated(mat,1);
6743
6744  ierr = PetscLogEventBegin(MAT_CreateSubMats,mat,0,0,0);CHKERRQ(ierr);
6745  ierr = (*mat->ops->createsubmatricesmpi)(mat,n,irow,icol,scall,submat);CHKERRQ(ierr);
6746  ierr = PetscLogEventEnd(MAT_CreateSubMats,mat,0,0,0);CHKERRQ(ierr);
6747  for (i=0; i<n; i++) {
6748    if (mat->symmetric || mat->structurally_symmetric || mat->hermitian) {
6749      ierr = ISEqual(irow[i],icol[i],&eq);CHKERRQ(ierr);
6750      if (eq) {
6751        if (mat->symmetric) {
6752          ierr = MatSetOption((*submat)[i],MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
6753        } else if (mat->hermitian) {
6754          ierr = MatSetOption((*submat)[i],MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr);
6755        } else if (mat->structurally_symmetric) {
6756          ierr = MatSetOption((*submat)[i],MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
6757        }
6758      }
6759    }
6760  }
6761  PetscFunctionReturn(0);
6762}
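/* The calling pattern mirrors MatCreateSubMatrices(); the difference is that
   each irow[i]/icol[i] pair may live on a sub-communicator of mat's
   communicator, and submat[i] is then a parallel matrix on that
   sub-communicator rather than a sequential one. A sketch (not from this
   source file), assuming an MPI_Comm subcomm and illustrative sizes:

     ierr = ISCreateStride(subcomm,nlocal,first,1,&rows);CHKERRQ(ierr);
     ierr = MatCreateSubMatricesMPI(A,1,&rows,&rows,MAT_INITIAL_MATRIX,&submats);CHKERRQ(ierr);
*/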
6763
6764/*@C
6765 MatDestroyMatrices - Destroys an array of matrices.
6766
6767 Collective on Mat
6768
6769 Input Parameters:
6770+ n - the number of local matrices
6771- mat - the matrices (note that this is a pointer to the array of matrices)
6772
6773 Level: advanced
6774
6775 Notes:
6776 Frees not only the matrices, but also the array that contains them.
6777 In Fortran the array is not freed.
6778
6779.seealso: MatCreateSubMatrices(), MatDestroySubMatrices()
6780@*/
6781PetscErrorCode MatDestroyMatrices(PetscInt n,Mat *mat[])
6782{
6783 PetscErrorCode ierr;
6784 PetscInt i;
6785
6786  PetscFunctionBegin;
6787  if (!*mat) PetscFunctionReturn(0);
6788  if (n < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to destroy negative number of matrices %D",n);
6789  PetscValidPointer(mat,2);
6790
6791  for (i=0; i<n; i++) {
6792    ierr = MatDestroy(&(*mat)[i]);CHKERRQ(ierr);
6793  }
6794
6795  /* memory is allocated even if n = 0 */
6796  ierr = PetscFree(*mat);CHKERRQ(ierr);
6797  PetscFunctionReturn(0);
6798}
6799
6800/*@C
6801 MatDestroySubMatrices - Destroys a set of matrices obtained with MatCreateSubMatrices().
6802
6803 Collective on Mat
6804
6805 Input Parameters:
6806+ n - the number of local matrices
6807- mat - the matrices (note that this is a pointer to the array of matrices, just to match the calling
6808 sequence of MatCreateSubMatrices())
6809
6810 Level: advanced
6811
6812 Notes:
6813 Frees not only the matrices, but also the array that contains them.
6814 In Fortran the array is not freed.
6815
6816.seealso: MatCreateSubMatrices()
6817@*/
6818PetscErrorCode MatDestroySubMatrices(PetscInt n,Mat *mat[])
6819{
6820 PetscErrorCode ierr;
6821 Mat mat0;
6822
6823  PetscFunctionBegin;
6824  if (!*mat) PetscFunctionReturn(0);
6825  /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
6826  if (n < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to destroy negative number of matrices %D",n);
6827  PetscValidPointer(mat,2);
6828
6829  mat0 = (*mat)[0];
6830  if (mat0 && mat0->ops->destroysubmatrices) {
6831    ierr = (mat0->ops->destroysubmatrices)(n,mat);CHKERRQ(ierr);
6832  } else {
6833    ierr = MatDestroyMatrices(n,mat);CHKERRQ(ierr);
6834  }
6835  PetscFunctionReturn(0);
6836}
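/* The dispatch through mat0->ops->destroysubmatrices above lets
   implementations that allocate all submatrices in one contiguous block
   (cf. the n+1 array length noted at line 6825) free them as a unit;
   otherwise the generic MatDestroyMatrices() path is taken. */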
6837
6838/*@C
6839 MatGetSeqNonzeroStructure - Extracts the sequential nonzero structure from a matrix.
6840
6841 Collective on Mat
6842
6843 Input Parameter:
6844. mat - the matrix
6845
6846 Output Parameter:
6847. matstruct - the sequential matrix with the nonzero structure of mat
6848
6849 Level: intermediate
6850
6851.seealso: MatDestroySeqNonzeroStructure(), MatCreateSubMatrices(), MatDestroyMatrices()
6852@*/
6853PetscErrorCode MatGetSeqNonzeroStructure(Mat mat,Mat *matstruct)
6854{
6855 PetscErrorCode ierr;
6856
6857  PetscFunctionBegin;
6858  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6859  PetscValidPointer(matstruct,2);
6860
6861  PetscValidType(mat,1);
6862  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6863  MatCheckPreallocated(mat,1);
6864
6865  if (!mat->ops->getseqnonzerostructure) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Not for matrix type %s\n",((PetscObject)mat)->type_name);
6866  ierr = PetscLogEventBegin(MAT_GetSeqNonzeroStructure,mat,0,0,0);CHKERRQ(ierr);
6867  ierr = (*mat->ops->getseqnonzerostructure)(mat,matstruct);CHKERRQ(ierr);
6868  ierr = PetscLogEventEnd(MAT_GetSeqNonzeroStructure,mat,0,0,0);CHKERRQ(ierr);
6869  PetscFunctionReturn(0);
6870}
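/* A minimal sketch (not from this source file) pairing
   MatGetSeqNonzeroStructure() with MatDestroySeqNonzeroStructure(); the
   returned matrix carries only the nonzero pattern of mat, useful e.g. for a
   sequential ordering or coloring computation. Assumes A is an assembled Mat.

     Mat pattern;

     ierr = MatGetSeqNonzeroStructure(A,&pattern);CHKERRQ(ierr);
     // ... inspect the sequential nonzero pattern ...
     ierr = MatDestroySeqNonzeroStructure(&pattern);CHKERRQ(ierr);
*/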
6871
6872/*@C
6873 MatDestroySeqNonzeroStructure - Destroys a matrix obtained with MatGetSeqNonzeroStructure().
6874
6875 Collective on Mat
6876
6877 Input Parameter:
6878. mat - the matrix (note that this is a pointer to the matrix, just to match the calling
6879 sequence of MatGetSeqNonzeroStructure())
6880
6881 Level: advanced
6882
6883 Notes:
6884 Frees the matrix and sets *mat to NULL.
6885
6886.seealso: MatGetSeqNonzeroStructure()
6887@*/
6888PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
6889{
6890 PetscErrorCode ierr;
6891
6892  PetscFunctionBegin;
6893  PetscValidPointer(mat,1);
6894  ierr = MatDestroy(mat);CHKERRQ(ierr);
6895  PetscFunctionReturn(0);
6896}
6897
6898/*@
6899 MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
6900 replaces the index sets by larger ones that represent submatrices with
6901 additional overlap.
6902
6903 Collective on Mat
6904
6905 Input Parameters:
6906+ mat - the matrix
6907. n - the number of index sets
6908. is - the array of index sets (these index sets will be changed during the call)
6909- ov - the additional overlap requested
6910
6911 Options Database:
6912. -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
6913
6914 Level: developer
6915
6916
6917.seealso: MatCreateSubMatrices()
6918@*/
6919PetscErrorCode MatIncreaseOverlap(Mat mat,PetscInt n,IS is[],PetscInt ov)
6920{
6921 PetscErrorCode ierr;
6922
6923  PetscFunctionBegin;
6924  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6925  PetscValidType(mat,1);
6926  if (n < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have one or more domains, you have %D",n);
6927  if (n) {
6928    PetscValidPointer(is,3);
6929    PetscValidHeaderSpecific(*is,IS_CLASSID,3);
6930  }
6931  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6932  if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6933  MatCheckPreallocated(mat,1);
6934
6935  if (!ov) PetscFunctionReturn(0);
6936  if (!mat->ops->increaseoverlap) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
6937  ierr = PetscLogEventBegin(MAT_IncreaseOverlap,mat,0,0,0);CHKERRQ(ierr);
6938  ierr = (*mat->ops->increaseoverlap)(mat,n,is,ov);CHKERRQ(ierr);
6939  ierr = PetscLogEventEnd(MAT_IncreaseOverlap,mat,0,0,0);CHKERRQ(ierr);
6940  PetscFunctionReturn(0);
6941}
6942
6943
6944PetscErrorCode MatIncreaseOverlapSplit_Single(Mat,IS*,PetscInt);
6945
6946/*@
6947 MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
6948 a subcommunicator, replaces the index sets by larger ones that represent submatrices with
6949 additional overlap.
6950
6951 Collective on Mat
6952
6953 Input Parameters:
6954+ mat - the matrix
6955. n - the number of index sets
6956. is - the array of index sets (these index sets will be changed during the call)
6957- ov - the additional overlap requested
6958
6959 Options Database:
6960. -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrices)
6961
6962 Level: developer
6963
6964
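 Example of Usage:
 A minimal sketch, assuming A is an assembled parallel matrix and the index sets
 were created elsewhere (e.g. with ISCreateGeneral()); error checking is omitted.
.vb
 IS is[2];
 /* ... fill is[0] and is[1] with the rows of two submatrices ... */
 MatIncreaseOverlapSplit(A,2,is,1);  /* grow each set by one level of overlap */
 /* is[0] and is[1] now index the enlarged submatrices */
.ve
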
6965.seealso: MatCreateSubMatrices()
6966@*/
6967PetscErrorCode MatIncreaseOverlapSplit(Mat mat,PetscInt n,IS is[],PetscInt ov)
6968{
6969 PetscInt i;
6970 PetscErrorCode ierr;
6971
6972 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 6972; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
6973 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),6973,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),6973,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),6973,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),6973,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
6974 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),6974,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
6975 if (n < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have one or more domains, you have %D",n)return PetscError(((MPI_Comm)0x44000001),6975,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,63,PETSC_ERROR_INITIAL,"Must have one or more domains, you have %D"
,n)
;
6976 if (n) {
6977 PetscValidPointer(is,3)do { if (!is) return PetscError(((MPI_Comm)0x44000001),6977,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",3); if (!PetscCheckPointer(is
,PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),6977,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",3); } while (0)
;
6978 PetscValidHeaderSpecific(*is,IS_CLASSID,3)do { if (!*is) return PetscError(((MPI_Comm)0x44000001),6978,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(*is,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),6978,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(*is))->classid != IS_CLASSID) { if
(((PetscObject)(*is))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),6978,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),6978,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
6979 }
6980 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),6980,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
6981 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),6981,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
6982 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),6982,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
6983 if (!ov) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
6984 ierr = PetscLogEventBegin(MAT_IncreaseOverlap,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_IncreaseOverlap].active) ? (*PetscLogPLB)((MAT_IncreaseOverlap
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),6984,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
6985 for(i=0; i<n; i++){
6986 ierr = MatIncreaseOverlapSplit_Single(mat,&is[i],ov);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),6986,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
6987 }
6988 ierr = PetscLogEventEnd(MAT_IncreaseOverlap,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_IncreaseOverlap].active) ? (*PetscLogPLE)((MAT_IncreaseOverlap
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),6988,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
6989 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
6990}
6991
6992
6993
6994
6995/*@
6996 MatGetBlockSize - Returns the matrix block size.
6997
6998 Not Collective
6999
7000 Input Parameter:
7001. mat - the matrix
7002
7003 Output Parameter:
7004. bs - block size
7005
7006 Notes:
7007 Block row formats are MATSEQBAIJ, MATMPIBAIJ, MATSEQSBAIJ, MATMPISBAIJ. These formats ALWAYS have square block storage in the matrix.
7008
7009 If the block size has not been set yet this routine returns 1.
7010
7011 Level: intermediate
7012
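 Example of Usage:
 A minimal sketch, assuming A is a matrix created elsewhere:
.vb
 PetscInt bs;
 MatGetBlockSize(A,&bs);  /* bs is 1 if no block size has been set */
.ve
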
7013.seealso: MatCreateSeqBAIJ(), MatCreateBAIJ(), MatGetBlockSizes()
7014@*/
7015PetscErrorCode MatGetBlockSize(Mat mat,PetscInt *bs)
7016{
7017 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7017; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7018 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7018,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7018,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7018,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7018,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7019 PetscValidIntPointer(bs,2)do { if (!bs) return PetscError(((MPI_Comm)0x44000001),7019,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",2); if (!PetscCheckPointer(bs
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7019,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to PetscInt: Parameter # %d",2); } while (0
)
;
7020 *bs = PetscAbs(mat->rmap->bs)(((mat->rmap->bs) >= 0) ? (mat->rmap->bs) : (-
(mat->rmap->bs)))
;
7021 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7022}
7023
7024/*@
7025 MatGetBlockSizes - Returns the matrix block row and column sizes.
7026
7027 Not Collective
7028
7029 Input Parameter:
7030. mat - the matrix
7031
7032 Output Parameters:
7033+ rbs - row block size
7034- cbs - column block size
7035
7036 Notes:
7037 Block row formats are MATSEQBAIJ, MATMPIBAIJ, MATSEQSBAIJ, MATMPISBAIJ. These formats ALWAYS have square block storage in the matrix.
7038 If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7039
7040 If a block size has not been set yet this routine returns 1.
7041
7042 Level: intermediate
7043
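 Example of Usage:
 A minimal sketch, assuming A is a matrix created elsewhere; judging from the
 implementation, either output may be skipped by passing NULL for it:
.vb
 PetscInt rbs,cbs;
 MatGetBlockSizes(A,&rbs,&cbs);
 MatGetBlockSizes(A,&rbs,NULL);  /* row block size only */
.ve
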
7044.seealso: MatCreateSeqBAIJ(), MatCreateBAIJ(), MatGetBlockSize(), MatSetBlockSize(), MatSetBlockSizes()
7045@*/
7046PetscErrorCode MatGetBlockSizes(Mat mat,PetscInt *rbs, PetscInt *cbs)
7047{
7048 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7048; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7049 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7049,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7049,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7049,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7049,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7050 if (rbs) PetscValidIntPointer(rbs,2)do { if (!rbs) return PetscError(((MPI_Comm)0x44000001),7050,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",2); if
(!PetscCheckPointer(rbs,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),7050,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscInt: Parameter # %d"
,2); } while (0)
;
7051 if (cbs) PetscValidIntPointer(cbs,3)do { if (!cbs) return PetscError(((MPI_Comm)0x44000001),7051,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(cbs,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),7051,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscInt: Parameter # %d"
,3); } while (0)
;
7052 if (rbs) *rbs = PetscAbs(mat->rmap->bs)(((mat->rmap->bs) >= 0) ? (mat->rmap->bs) : (-
(mat->rmap->bs)))
;
7053 if (cbs) *cbs = PetscAbs(mat->cmap->bs)(((mat->cmap->bs) >= 0) ? (mat->cmap->bs) : (-
(mat->cmap->bs)))
;
7054 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7055}
7056
7057/*@
7058 MatSetBlockSize - Sets the matrix block size.
7059
7060 Logically Collective on Mat
7061
7062 Input Parameters:
7063+ mat - the matrix
7064- bs - block size
7065
7066 Notes:
7067 Block row formats are MATSEQBAIJ, MATMPIBAIJ, MATSEQSBAIJ, MATMPISBAIJ. These formats ALWAYS have square block storage in the matrix.
7068 This must be called before MatSetUp() or MatXXXSetPreallocation() (or the block size will default to 1), and the block size cannot be changed later.
7069
7070 For MATMPIAIJ and MATSEQAIJ matrix formats, this function can be called at a later stage, provided that the specified block size
7071 is compatible with the matrix local sizes.
7072
7073 Level: intermediate
7074
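 Example of Usage:
 A minimal sketch of the usual creation sequence; the type and sizes below are
 placeholders:
.vb
 MatCreate(PETSC_COMM_WORLD,&A);
 MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,12,12);
 MatSetType(A,MATBAIJ);
 MatSetBlockSize(A,3);  /* before MatSetUp() or preallocation */
 MatSetUp(A);
.ve
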
7075.seealso: MatCreateSeqBAIJ(), MatCreateBAIJ(), MatGetBlockSize(), MatSetBlockSizes(), MatGetBlockSizes()
7076@*/
7077PetscErrorCode MatSetBlockSize(Mat mat,PetscInt bs)
7078{
7079 PetscErrorCode ierr;
7080
7081 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7081; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7082 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7082,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7082,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7082,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7082,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7083 PetscValidLogicalCollectiveInt(mat,bs,2)do { PetscErrorCode _7_ierr; PetscInt b1[2],b2[2]; b1[0] = -bs
; b1[1] = bs; _7_ierr = (PetscAllreduceBarrierCheck(PetscObjectComm
((PetscObject)mat),2,7083,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)mat))),0) || MPI_Allreduce((b1),(b2),(2),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000001)),(PetscObjectComm((PetscObject
)mat)))));do {if (__builtin_expect(!!(_7_ierr),0)) return PetscError
(((MPI_Comm)0x44000001),7083,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (-b2[0] != b2
[1]) return PetscError(PetscObjectComm((PetscObject)mat),7083
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,62,PETSC_ERROR_INITIAL,"Int value must be same on all processes, argument # %d"
,2); } while (0)
;
7084 ierr = MatSetBlockSizes(mat,bs,bs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7084,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7085 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7086}
7087
7088/*@
7089 MatSetVariableBlockSizes - Sets the diagonal blocks of the matrix, which need not be of the same size
7090
7091 Logically Collective on Mat
7092
7093 Input Parameters:
7094+ mat - the matrix
7095. nblocks - the number of blocks on this process
7096- bsizes - the block sizes
7097
7098 Notes:
7099 Currently used by PCVPBJACOBI for SeqAIJ matrices
7100
7101 Level: intermediate
7102
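 Example of Usage:
 A minimal sketch, assuming A is a SeqAIJ matrix with 10 local rows; the block
 sizes must sum to the local number of rows:
.vb
 PetscInt bsizes[3] = {2,3,5};  /* 2+3+5 = 10 = local rows */
 MatSetVariableBlockSizes(A,3,bsizes);
.ve
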
7103.seealso: MatCreateSeqBAIJ(), MatCreateBAIJ(), MatGetBlockSize(), MatSetBlockSizes(), MatGetBlockSizes(), MatGetVariableBlockSizes()
7104@*/
7105PetscErrorCode MatSetVariableBlockSizes(Mat mat,PetscInt nblocks,PetscInt *bsizes)
7106{
7107 PetscErrorCode ierr;
7108 PetscInt i,ncnt = 0, nlocal;
7109
7110 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7110; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7111 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7111,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7111,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7111,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7111,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7112 if (nblocks < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of local blocks must be greater than or equal to zero")return PetscError(((MPI_Comm)0x44000001),7112,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Number of local blocks must be greater than or equal to zero"
)
;
7113 ierr = MatGetLocalSize(mat,&nlocal,NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7113,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7114 for (i=0; i<nblocks; i++) ncnt += bsizes[i];
7115 if (ncnt != nlocal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local block sizes %D does not equal local size of matrix %D",ncnt,nlocal)return PetscError(((MPI_Comm)0x44000001),7115,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"Sum of local block sizes %D does not equal local size of matrix %D"
,ncnt,nlocal)
;
7116 ierr = PetscFree(mat->bsizes)((*PetscTrFree)((void*)(mat->bsizes),7116,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((mat->bsizes) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7116,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7117 mat->nblocks = nblocks;
7118 ierr = PetscMalloc1(nblocks,&mat->bsizes)PetscMallocA(1,PETSC_FALSE,7118,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,(size_t)(nblocks)*sizeof(**(&mat->bsizes)),(&mat->
bsizes))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7118,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7119 ierr = PetscArraycpy(mat->bsizes,bsizes,nblocks)((sizeof(*(mat->bsizes)) != sizeof(*(bsizes))) || PetscMemcpy
(mat->bsizes,bsizes,(nblocks)*sizeof(*(mat->bsizes))))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7119,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7120 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7121}
7122
7123/*@C
7124 MatGetVariableBlockSizes - Gets the diagonal blocks of the matrix, which need not be of the same size
7125
7126 Not Collective
7127
7128 Input Parameter:
7129. mat - the matrix
7130
7131 Output Parameters:
7132+ nblocks - the number of blocks on this process
7133- bsizes - the block sizes
7134
7135 Notes: Currently not supported from Fortran
7136
7137 Level: intermediate
7138
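 Example of Usage:
 A minimal sketch, assuming MatSetVariableBlockSizes() was called on A earlier;
 bsizes points at internal data and must not be freed by the caller:
.vb
 PetscInt       nblocks;
 const PetscInt *bsizes;
 MatGetVariableBlockSizes(A,&nblocks,&bsizes);
.ve
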
7139.seealso: MatCreateSeqBAIJ(), MatCreateBAIJ(), MatGetBlockSize(), MatSetBlockSizes(), MatGetBlockSizes(), MatSetVariableBlockSizes()
7140@*/
7141PetscErrorCode MatGetVariableBlockSizes(Mat mat,PetscInt *nblocks,const PetscInt **bsizes)
7142{
7143 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7143; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7144 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7144,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7144,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7144,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7144,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7145 *nblocks = mat->nblocks;
7146 *bsizes = mat->bsizes;
7147 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7148}
7149
7150/*@
7151 MatSetBlockSizes - Sets the matrix block row and column sizes.
7152
7153 Logically Collective on Mat
7154
7155 Input Parameters:
7156+ mat - the matrix
7157. rbs - row block size
7158- cbs - column block size
7159
7160 Notes:
7161 Block row formats are MATSEQBAIJ, MATMPIBAIJ, MATSEQSBAIJ, MATMPISBAIJ. These formats ALWAYS have square block storage in the matrix.
7162 If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7163 This must be called before MatSetUp() or MatXXXSetPreallocation() (or the block size will default to 1), and the block size cannot be changed later.
7164
7165 For MATMPIAIJ and MATSEQAIJ matrix formats, this function can be called at a later stage, provided that the specified block sizes
7166 are compatible with the matrix local sizes.
7167
7168 The row and column block sizes determine the block sizes of the "row" and "column" vectors returned by MatCreateVecs().
7169
7170 Level: intermediate
7171
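 Example of Usage:
 A minimal sketch for a matrix with 3x2 blocks; A is assumed to have been created
 with compatible global sizes:
.vb
 MatSetBlockSizes(A,3,2);  /* before MatSetUp() or preallocation */
.ve
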
7172.seealso: MatCreateSeqBAIJ(), MatCreateBAIJ(), MatGetBlockSize(), MatSetBlockSize(), MatGetBlockSizes()
7173@*/
7174PetscErrorCode MatSetBlockSizes(Mat mat,PetscInt rbs,PetscInt cbs)
7175{
7176 PetscErrorCode ierr;
7177
7178 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7178; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7179 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7179,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7179,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7179,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7179,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7180 PetscValidLogicalCollectiveInt(mat,rbs,2)do { PetscErrorCode _7_ierr; PetscInt b1[2],b2[2]; b1[0] = -rbs
; b1[1] = rbs; _7_ierr = (PetscAllreduceBarrierCheck(PetscObjectComm
((PetscObject)mat),2,7180,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)mat))),0) || MPI_Allreduce((b1),(b2),(2),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000001)),(PetscObjectComm((PetscObject
)mat)))));do {if (__builtin_expect(!!(_7_ierr),0)) return PetscError
(((MPI_Comm)0x44000001),7180,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (-b2[0] != b2
[1]) return PetscError(PetscObjectComm((PetscObject)mat),7180
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,62,PETSC_ERROR_INITIAL,"Int value must be same on all processes, argument # %d"
,2); } while (0)
;
7181 PetscValidLogicalCollectiveInt(mat,cbs,3)do { PetscErrorCode _7_ierr; PetscInt b1[2],b2[2]; b1[0] = -cbs
; b1[1] = cbs; _7_ierr = (PetscAllreduceBarrierCheck(PetscObjectComm
((PetscObject)mat),2,7181,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)mat))),0) || MPI_Allreduce((b1),(b2),(2),(((MPI_Datatype
)0x4c000405)),((MPI_Op)(0x58000001)),(PetscObjectComm((PetscObject
)mat)))));do {if (__builtin_expect(!!(_7_ierr),0)) return PetscError
(((MPI_Comm)0x44000001),7181,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (-b2[0] != b2
[1]) return PetscError(PetscObjectComm((PetscObject)mat),7181
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,62,PETSC_ERROR_INITIAL,"Int value must be same on all processes, argument # %d"
,3); } while (0)
;
7182 if (mat->ops->setblocksizes) {
7183 ierr = (*mat->ops->setblocksizes)(mat,rbs,cbs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7183,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7184 }
7185 if (mat->rmap->refcnt) {
7186 ISLocalToGlobalMapping l2g = NULL((void*)0);
7187 PetscLayout nmap = NULL((void*)0);
7188
7189 ierr = PetscLayoutDuplicate(mat->rmap,&nmap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7189,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7190 if (mat->rmap->mapping) {
7191 ierr = ISLocalToGlobalMappingDuplicate(mat->rmap->mapping,&l2g);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7191,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7192 }
7193 ierr = PetscLayoutDestroy(&mat->rmap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7193,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7194 mat->rmap = nmap;
7195 mat->rmap->mapping = l2g;
7196 }
7197 if (mat->cmap->refcnt) {
7198 ISLocalToGlobalMapping l2g = NULL((void*)0);
7199 PetscLayout nmap = NULL((void*)0);
7200
7201 ierr = PetscLayoutDuplicate(mat->cmap,&nmap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7201,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7202 if (mat->cmap->mapping) {
7203 ierr = ISLocalToGlobalMappingDuplicate(mat->cmap->mapping,&l2g);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7203,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7204 }
7205 ierr = PetscLayoutDestroy(&mat->cmap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7205,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7206 mat->cmap = nmap;
7207 mat->cmap->mapping = l2g;
7208 }
7209 ierr = PetscLayoutSetBlockSize(mat->rmap,rbs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7209,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7210 ierr = PetscLayoutSetBlockSize(mat->cmap,cbs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7210,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7211 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7212}
7213
7214/*@
7215 MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
7216
7217 Logically Collective on Mat
7218
7219 Input Parameters:
7220+ mat - the matrix
7221. fromRow - matrix from which to copy row block size
7222- fromCol - matrix from which to copy column block size (can be same as fromRow)
7223
7224 Level: developer
7225
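 Example of Usage:
 A minimal sketch, assuming A, B and C are matrices created elsewhere:
.vb
 MatSetBlockSizesFromMats(C,A,B);  /* C gets A's row and B's column block size */
.ve
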
7226.seealso: MatCreateSeqBAIJ(), MatCreateBAIJ(), MatGetBlockSize(), MatSetBlockSizes()
7227@*/
7228PetscErrorCode MatSetBlockSizesFromMats(Mat mat,Mat fromRow,Mat fromCol)
7229{
7230 PetscErrorCode ierr;
7231
7232 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7232; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7233 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7233,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7233,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7233,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7233,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7234 PetscValidHeaderSpecific(fromRow,MAT_CLASSID,2)do { if (!fromRow) return PetscError(((MPI_Comm)0x44000001),7234
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",2); if (
!PetscCheckPointer(fromRow,PETSC_OBJECT)) return PetscError((
(MPI_Comm)0x44000001),7234,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(fromRow))->classid != MAT_CLASSID)
{ if (((PetscObject)(fromRow))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),7234,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),7234,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
7235 PetscValidHeaderSpecific(fromCol,MAT_CLASSID,3)do { if (!fromCol) return PetscError(((MPI_Comm)0x44000001),7235
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(fromCol,PETSC_OBJECT)) return PetscError((
(MPI_Comm)0x44000001),7235,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(fromCol))->classid != MAT_CLASSID)
{ if (((PetscObject)(fromCol))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),7235,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),7235,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
7236 if (fromRow->rmap->bs > 0) {ierr = PetscLayoutSetBlockSize(mat->rmap,fromRow->rmap->bs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7236,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
7237 if (fromCol->cmap->bs > 0) {ierr = PetscLayoutSetBlockSize(mat->cmap,fromCol->cmap->bs);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7237,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
7238 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7239}
7240
7241/*@
7242 MatResidual - Default routine to calculate the residual.
7243
7244 Collective on Mat
7245
7246 Input Parameters:
7247+ mat - the matrix
7248. b - the right-hand-side
7249- x - the approximate solution
7250
7251 Output Parameter:
7252. r - location to store the residual
7253
7254 Level: developer
7255
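 Example of Usage:
 A minimal sketch, assuming mat, b and x are set up elsewhere:
.vb
 Vec r;
 VecDuplicate(b,&r);
 MatResidual(mat,b,x,r);  /* r = b - mat*x */
 VecDestroy(&r);
.ve
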
7256.seealso: PCMGSetResidual()
7257@*/
7258PetscErrorCode MatResidual(Mat mat,Vec b,Vec x,Vec r)
7259{
7260 PetscErrorCode ierr;
7261
7262 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7262; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7263 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7263,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7263,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7263,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7263,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7264 PetscValidHeaderSpecific(b,VEC_CLASSID,2)do { if (!b) return PetscError(((MPI_Comm)0x44000001),7264,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(b,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),7264,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(b))->classid != VEC_CLASSID) { if (
((PetscObject)(b))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),7264,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),7264,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
7265 PetscValidHeaderSpecific(x,VEC_CLASSID,3)do { if (!x) return PetscError(((MPI_Comm)0x44000001),7265,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(x,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),7265,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(x))->classid != VEC_CLASSID) { if (
((PetscObject)(x))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),7265,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),7265,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
7266 PetscValidHeaderSpecific(r,VEC_CLASSID,4)do { if (!r) return PetscError(((MPI_Comm)0x44000001),7266,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",4); if (!PetscCheckPointer(r,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),7266,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,4); if (((PetscObject)(r))->classid != VEC_CLASSID) { if (
((PetscObject)(r))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),7266,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,4); else return PetscError(((MPI_Comm)0x44000001),7266,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",4); } } while (0)
;
7267 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7267,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7268 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7268,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7269 ierr = PetscLogEventBegin(MAT_Residual,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Residual].active) ? (*PetscLogPLB)((MAT_Residual),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7269,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7270 if (!mat->ops->residual) {
7271 ierr = MatMult(mat,x,r);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7271,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7272 ierr = VecAYPX(r,-1.0,b);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7272,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7273 } else {
7274 ierr = (*mat->ops->residual)(mat,b,x,r);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7274,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7275 }
7276 ierr = PetscLogEventEnd(MAT_Residual,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_Residual].active) ? (*PetscLogPLE)((MAT_Residual),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7276,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7277 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7278}
7279
7280/*@C
7281 MatGetRowIJ - Returns the compressed row storage i and j indices for sequential matrices.
7282
7283 Collective on Mat
7284
7285 Input Parameters:
7286+ mat - the matrix
7287. shift - 0 or 1 indicating we want the indices starting at 0 or 1
7288. symmetric - PETSC_TRUE or PETSC_FALSE indicating the matrix data structure should be symmetrized
7289- inodecompressed - PETSC_TRUE or PETSC_FALSE indicating if the nonzero structure of the
7290 inodes or the nonzero elements is wanted. For BAIJ matrices the compressed version is
7291 always used.
7292
7293 Output Parameters:
7294+ n - number of rows in the (possibly compressed) matrix
7295. ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
7296. ja - the column indices
7297- done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7298 are responsible for handling the case when done == PETSC_FALSE and ia and ja are not set
7299
7300 Level: developer
7301
7302 Notes:
7303 You CANNOT change any of the ia[] or ja[] values.
7304
7305 Use MatRestoreRowIJ() when you are finished accessing the ia[] and ja[] values.
7306
7307 Fortran Notes:
7308 In Fortran use
7309$
7310$ PetscInt ia(1), ja(1)
7311$ PetscOffset iia, jja
7312$ call MatGetRowIJ(mat,shift,symmetric,inodecompressed,n,ia,iia,ja,jja,done,ierr)
7313$ ! Access the ith and jth entries via ia(iia + i) and ja(jja + j)
7314
7315 or
7316$
7317$ PetscInt, pointer :: ia(:),ja(:)
7318$ call MatGetRowIJF90(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
7319$ ! Access the ith and jth entries via ia(i) and ja(j)
7320
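 Example of Usage:
 A minimal sketch in C, assuming A is an assembled sequential matrix:
.vb
 PetscInt       n;
 const PetscInt *ia,*ja;
 PetscBool      done;
 MatGetRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);
 if (done) {
   /* row r has column indices ja[ia[r]] .. ja[ia[r+1]-1] */
   MatRestoreRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);
 }
.ve
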
7321.seealso: MatGetColumnIJ(), MatRestoreRowIJ(), MatSeqAIJGetArray()
7322@*/
7323PetscErrorCode MatGetRowIJ(Mat mat,PetscInt shift,PetscBool symmetric,PetscBool inodecompressed,PetscInt *n,const PetscInt *ia[],const PetscInt *ja[],PetscBool *done)
7324{
7325 PetscErrorCode ierr;
7326
7327 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7327; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7328 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7328,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7328,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7328,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7328,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7329 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7329,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7330 PetscValidIntPointer(n,5)do { if (!n) return PetscError(((MPI_Comm)0x44000001),7330,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",5); if (!PetscCheckPointer(n,
PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7330,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to PetscInt: Parameter # %d",5); } while (0
)
;
7331 if (ia) PetscValidIntPointer(ia,6)do { if (!ia) return PetscError(((MPI_Comm)0x44000001),7331,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",6); if (!PetscCheckPointer(ia
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7331,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to PetscInt: Parameter # %d",6); } while (0
)
;
7332 if (ja) PetscValidIntPointer(ja,7)do { if (!ja) return PetscError(((MPI_Comm)0x44000001),7332,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",7); if (!PetscCheckPointer(ja
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7332,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to PetscInt: Parameter # %d",7); } while (0
)
;
7333 PetscValidIntPointer(done,8)do { if (!done) return PetscError(((MPI_Comm)0x44000001),7333
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",8); if
(!PetscCheckPointer(done,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),7333,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscInt: Parameter # %d"
,8); } while (0)
;
7334 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7334,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7335 if (!mat->ops->getrowij) *done = PETSC_FALSE;
7336 else {
7337 *done = PETSC_TRUE;
7338 ierr = PetscLogEventBegin(MAT_GetRowIJ,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_GetRowIJ].active) ? (*PetscLogPLB)((MAT_GetRowIJ),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7338,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7339 ierr = (*mat->ops->getrowij)(mat,shift,symmetric,inodecompressed,n,ia,ja,done);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7339,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7340 ierr = PetscLogEventEnd(MAT_GetRowIJ,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_GetRowIJ].active) ? (*PetscLogPLE)((MAT_GetRowIJ),0,(PetscObject
)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject)(0)) : 0
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7340,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7341 }
7342 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7343}
7344
7345/*@C
7346 MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
7347
7348 Collective on Mat
7349
7350 Input Parameters:
7351+ mat - the matrix
7352. shift - 0 or 1 indicating whether the returned indices start at 0 or at 1
7353. symmetric - PETSC_TRUE or PETSC_FALSE indicating whether the matrix data structure should be
7354 symmetrized
7355. inodecompressed - PETSC_TRUE or PETSC_FALSE indicating whether the nonzero structure of the
7356 inodes or of the nonzero elements is wanted. For BAIJ matrices the compressed version is
7357 always used.
7358. n - number of columns in the (possibly compressed) matrix
7359. ia - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that col of the matrix
7360- ja - the row indices
7361
7362 Output Parameters:
7363. done - PETSC_TRUE or PETSC_FALSE, indicating whether the values have been returned
7364
7365 Level: developer
7366
7367.seealso: MatGetRowIJ(), MatRestoreColumnIJ()
7368@*/
7369PetscErrorCode MatGetColumnIJ(Mat mat,PetscInt shift,PetscBool symmetric,PetscBool inodecompressed,PetscInt *n,const PetscInt *ia[],const PetscInt *ja[],PetscBool *done)
7370{
7371 PetscErrorCode ierr;
7372
7373 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7373; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7374 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7374,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7374,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7374,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7374,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7375 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7375,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7376 PetscValidIntPointer(n,4)do { if (!n) return PetscError(((MPI_Comm)0x44000001),7376,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",4); if (!PetscCheckPointer(n,
PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7376,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to PetscInt: Parameter # %d",4); } while (0
)
;
7377 if (ia) PetscValidIntPointer(ia,5)do { if (!ia) return PetscError(((MPI_Comm)0x44000001),7377,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",5); if (!PetscCheckPointer(ia
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7377,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to PetscInt: Parameter # %d",5); } while (0
)
;
7378 if (ja) PetscValidIntPointer(ja,6)do { if (!ja) return PetscError(((MPI_Comm)0x44000001),7378,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",6); if (!PetscCheckPointer(ja
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7378,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to PetscInt: Parameter # %d",6); } while (0
)
;
7379 PetscValidIntPointer(done,7)do { if (!done) return PetscError(((MPI_Comm)0x44000001),7379
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",7); if
(!PetscCheckPointer(done,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),7379,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscInt: Parameter # %d"
,7); } while (0)
;
7380 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7380,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7381 if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
7382 else {
7383 *done = PETSC_TRUE;
7384 ierr = (*mat->ops->getcolumnij)(mat,shift,symmetric,inodecompressed,n,ia,ja,done);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7384,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7385 }
7386 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7387}
7388
7389/*@C
7390 MatRestoreRowIJ - Call after you have finished with the ia,ja indices obtained with
7391 MatGetRowIJ().
7392
7393 Collective on Mat
7394
7395 Input Parameters:
7396+ mat - the matrix
7397. shift - 0 or 1 indicating whether the returned indices start at 0 or at 1
7398. symmetric - PETSC_TRUE or PETSC_FALSE indicating whether the matrix data structure should be
7399 symmetrized
7400. inodecompressed - PETSC_TRUE or PETSC_FALSE indicating whether the nonzero structure of the
7401 inodes or of the nonzero elements is wanted. For BAIJ matrices the compressed version is
7402 always used.
7403. n - size of (possibly compressed) matrix
7404. ia - the row pointers
7405- ja - the column indices
7406
7407 Output Parameters:
7408. done - PETSC_TRUE or PETSC_FALSE indicating whether the values have been returned
7409
7410 Note:
7411 This routine zeros out n, ia, and ja. This is to prevent accidental
7412 use of the array after it has been restored. If you pass NULL, it will
7413 not zero the pointers. Use of ia or ja after MatRestoreRowIJ() is invalid.
7414
7415 Level: developer
7416
7417.seealso: MatGetRowIJ(), MatRestoreColumnIJ()
7418@*/
7419PetscErrorCode MatRestoreRowIJ(Mat mat,PetscInt shift,PetscBool symmetric,PetscBool inodecompressed,PetscInt *n,const PetscInt *ia[],const PetscInt *ja[],PetscBool *done)
7420{
7421 PetscErrorCode ierr;
7422
7423 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7423; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7424 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7424,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7424,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7424,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7424,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7425 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7425,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7426 if (ia) PetscValidIntPointer(ia,6)do { if (!ia) return PetscError(((MPI_Comm)0x44000001),7426,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",6); if (!PetscCheckPointer(ia
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7426,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to PetscInt: Parameter # %d",6); } while (0
)
;
7427 if (ja) PetscValidIntPointer(ja,7)do { if (!ja) return PetscError(((MPI_Comm)0x44000001),7427,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",7); if (!PetscCheckPointer(ja
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7427,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to PetscInt: Parameter # %d",7); } while (0
)
;
7428 PetscValidIntPointer(done,8)do { if (!done) return PetscError(((MPI_Comm)0x44000001),7428
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",8); if
(!PetscCheckPointer(done,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),7428,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscInt: Parameter # %d"
,8); } while (0)
;
7429 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7429,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7430
7431 if (!mat->ops->restorerowij) *done = PETSC_FALSE;
7432 else {
7433 *done = PETSC_TRUE;
7434 ierr = (*mat->ops->restorerowij)(mat,shift,symmetric,inodecompressed,n,ia,ja,done);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7434,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7435 if (n) *n = 0;
7436 if (ia) *ia = NULL((void*)0);
7437 if (ja) *ja = NULL((void*)0);
7438 }
7439 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7440}
7441
7442/*@C
7443 MatRestoreColumnIJ - Call after you have finished with the ia,ja indices obtained with
7444 MatGetColumnIJ().
7445
7446 Collective on Mat
7447
7448 Input Parameters:
7449+ mat - the matrix
7450. shift - 0 or 1 indicating whether the returned indices start at 0 or at 1
7451. symmetric - PETSC_TRUE or PETSC_FALSE indicating whether the matrix data structure should be
7452 symmetrized
7453- inodecompressed - PETSC_TRUE or PETSC_FALSE indicating whether the nonzero structure of the
7454 inodes or of the nonzero elements is wanted. For BAIJ matrices the compressed version is
7455 always used.
7456
7457 Output Parameters:
7458+ n - size of (possibly compressed) matrix
7459. ia - the column pointers
7460. ja - the row indices
7461- done - PETSC_TRUE or PETSC_FALSE indicating whether the values have been returned
7462
7463 Level: developer
7464
7465.seealso: MatGetColumnIJ(), MatRestoreRowIJ()
7466@*/
7467PetscErrorCode MatRestoreColumnIJ(Mat mat,PetscInt shift,PetscBool symmetric,PetscBool inodecompressed,PetscInt *n,const PetscInt *ia[],const PetscInt *ja[],PetscBool *done)
7468{
7469 PetscErrorCode ierr;
7470
7471 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7471; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7472 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7472,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7472,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7472,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7472,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7473 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7473,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7474 if (ia) PetscValidIntPointer(ia,5)do { if (!ia) return PetscError(((MPI_Comm)0x44000001),7474,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",5); if (!PetscCheckPointer(ia
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7474,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to PetscInt: Parameter # %d",5); } while (0
)
;
7475 if (ja) PetscValidIntPointer(ja,6)do { if (!ja) return PetscError(((MPI_Comm)0x44000001),7475,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",6); if (!PetscCheckPointer(ja
,PETSC_INT)) return PetscError(((MPI_Comm)0x44000001),7475,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer to PetscInt: Parameter # %d",6); } while (0
)
;
7476 PetscValidIntPointer(done,7)do { if (!done) return PetscError(((MPI_Comm)0x44000001),7476
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",7); if
(!PetscCheckPointer(done,PETSC_INT)) return PetscError(((MPI_Comm
)0x44000001),7476,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscInt: Parameter # %d"
,7); } while (0)
;
7477 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7477,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7478
7479 if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
7480 else {
7481 *done = PETSC_TRUE;
7482 ierr = (*mat->ops->restorecolumnij)(mat,shift,symmetric,inodecompressed,n,ia,ja,done);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7482,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7483 if (n) *n = 0;
7484 if (ia) *ia = NULL((void*)0);
7485 if (ja) *ja = NULL((void*)0);
7486 }
7487 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7488}
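By analogy with the row variant, a short sketch (again assuming an assembled SeqAIJ matrix A; illustrative only) that uses the compressed column indices to locate the densest column before restoring them:

   PetscErrorCode ierr;
   PetscInt       n,col,maxnz = 0,maxcol = -1;
   const PetscInt *ia,*ja;
   PetscBool      done;

   ierr = MatGetColumnIJ(A,0,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
   if (done) {
     for (col = 0; col < n; col++) {
       if (ia[col+1] - ia[col] > maxnz) {maxnz = ia[col+1] - ia[col]; maxcol = col;}
     }
   }
   ierr = MatRestoreColumnIJ(A,0,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);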
7489
7490/*@C
7491 MatColoringPatch - Used inside matrix coloring routines that
7492 use MatGetRowIJ() and/or MatGetColumnIJ().
7493
7494 Collective on Mat
7495
7496 Input Parameters:
7497+ mat - the matrix
7498. ncolors - max color value
7499. n - number of entries in colorarray
7500- colorarray - array indicating color for each column
7501
7502 Output Parameters:
7503. iscoloring - coloring generated using colorarray information
7504
7505 Level: developer
7506
7507.seealso: MatGetRowIJ(), MatGetColumnIJ()
7508
7509@*/
7510PetscErrorCode MatColoringPatch(Mat mat,PetscInt ncolors,PetscInt n,ISColoringValue colorarray[],ISColoring *iscoloring)
7511{
7512 PetscErrorCode ierr;
7513
7514 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7514; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7515 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7515,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7515,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7515,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7515,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7516 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7516,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7517 PetscValidIntPointer(colorarray,4)do { if (!colorarray) return PetscError(((MPI_Comm)0x44000001
),7517,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",4); if
(!PetscCheckPointer(colorarray,PETSC_INT)) return PetscError
(((MPI_Comm)0x44000001),7517,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer to PetscInt: Parameter # %d"
,4); } while (0)
;
7518 PetscValidPointer(iscoloring,5)do { if (!iscoloring) return PetscError(((MPI_Comm)0x44000001
),7518,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",5); if
(!PetscCheckPointer(iscoloring,PETSC_CHAR)) return PetscError
(((MPI_Comm)0x44000001),7518,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",5);
} while (0)
;
7519 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7519,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7520
7521 if (!mat->ops->coloringpatch) {
7522 ierr = ISColoringCreate(PetscObjectComm((PetscObject)mat),ncolors,n,colorarray,PETSC_OWN_POINTER,iscoloring);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7522,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7523 } else {
7524 ierr = (*mat->ops->coloringpatch)(mat,ncolors,n,colorarray,iscoloring);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7524,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7525 }
7526 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7527}
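A sketch of the calling convention (hypothetical trivial coloring on an assembled Mat mat, for illustration only): colorarray must come from PetscMalloc1() because the default branch above hands it to ISColoringCreate() with PETSC_OWN_POINTER, i.e. the routine takes ownership of the array.

   PetscErrorCode  ierr;
   PetscInt        i,n = 5;
   ISColoringValue *colorarray;
   ISColoring      iscoloring;

   ierr = PetscMalloc1(n,&colorarray);CHKERRQ(ierr);
   for (i = 0; i < n; i++) colorarray[i] = (ISColoringValue)i;  /* each column gets its own color */
   ierr = MatColoringPatch(mat,n,n,colorarray,&iscoloring);CHKERRQ(ierr);
   /* ... use the coloring ... */
   ierr = ISColoringDestroy(&iscoloring);CHKERRQ(ierr);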
7528
7529
7530/*@
7531 MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
7532
7533 Logically Collective on Mat
7534
7535 Input Parameter:
7536. mat - the factored matrix to be reset
7537
7538 Notes:
7539 This routine should be used only with factored matrices formed by in-place
7540 factorization via ILU(0) (or by in-place LU factorization for the MATSEQDENSE
7541 format). This option can save memory, for example, when solving nonlinear
7542 systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
7543 ILU(0) preconditioner.
7544
7545 Note that one can specify in-place ILU(0) factorization by calling
7546.vb
7547 PCSetType(pc,PCILU);
7548 PCFactorSetUseInPlace(pc,PETSC_TRUE);
7549.ve
7550 or by using the options -pc_type ilu -pc_factor_in_place
7551
7552 In-place factorization ILU(0) can also be used as a local
7553 solver for the blocks within the block Jacobi or additive Schwarz
7554 methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
7555 for details on setting local solver options.
7556
7557 Most users should employ the simplified KSP interface for linear solvers
7558 instead of working directly with matrix algebra routines such as this.
7559 See, e.g., KSPCreate().
7560
7561 Level: developer
7562
7563.seealso: PCFactorSetUseInPlace(), PCFactorGetUseInPlace()
7564
7565@*/
7566PetscErrorCode MatSetUnfactored(Mat mat)
7567{
7568 PetscErrorCode ierr;
7569
7570 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7570; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7571 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7571,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7571,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7571,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7571,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7572 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7572,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7573 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7573,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7574 mat->factortype = MAT_FACTOR_NONE;
7575 if (!mat->ops->setunfactored) PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7576 ierr = (*mat->ops->setunfactored)(mat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7576,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7577 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7578}
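A sketch of the scenario described in the notes above (assumes ksp, A, b, x are already created and set up; not taken from this file): select in-place ILU(0) through the PC, solve, then mark the matrix that was factored in place as unfactored so it can be used as an ordinary matrix again.

   PetscErrorCode ierr;
   PC             pc;

   ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
   ierr = PCSetType(pc,PCILU);CHKERRQ(ierr);
   ierr = PCFactorSetUseInPlace(pc,PETSC_TRUE);CHKERRQ(ierr);
   ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
   ierr = MatSetUnfactored(A);CHKERRQ(ierr);   /* A now behaves as an unfactored matrix */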
7579
7580/*MC
7581 MatDenseGetArrayF90 - Accesses a matrix array from Fortran90.
7582
7583 Synopsis:
7584 MatDenseGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
7585
7586 Not collective
7587
7588 Input Parameter:
7589. x - matrix
7590
7591 Output Parameters:
7592+ xx_v - the Fortran90 pointer to the array
7593- ierr - error code
7594
7595 Example of Usage:
7596.vb
7597 PetscScalar, pointer :: xx_v(:,:)
7598 ....
7599 call MatDenseGetArrayF90(x,xx_v,ierr)
7600 a = xx_v(3,1)
7601 call MatDenseRestoreArrayF90(x,xx_v,ierr)
7602.ve
7603
7604 Level: advanced
7605
7606.seealso: MatDenseRestoreArrayF90(), MatDenseGetArray(), MatDenseRestoreArray(), MatSeqAIJGetArrayF90()
7607
7608M*/
7609
7610/*MC
7611 MatDenseRestoreArrayF90 - Restores a matrix array that has been
7612 accessed with MatDenseGetArrayF90().
7613
7614 Synopsis:
7615 MatDenseRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
7616
7617 Not collective
7618
7619 Input Parameters:
7620+ x - matrix
7621- xx_v - the Fortran90 pointer to the array
7622
7623 Output Parameter:
7624. ierr - error code
7625
7626 Example of Usage:
7627.vb
7628 PetscScalar, pointer :: xx_v(:,:)
7629 ....
7630 call MatDenseGetArrayF90(x,xx_v,ierr)
7631 a = xx_v(3,1)
7632 call MatDenseRestoreArrayF90(x,xx_v,ierr)
7633.ve
7634
7635 Level: advanced
7636
7637.seealso: MatDenseGetArrayF90(), MatDenseGetArray(), MatDenseRestoreArray(), MatSeqAIJRestoreArrayF90()
7638
7639M*/
7640
7641
7642/*MC
7643 MatSeqAIJGetArrayF90 - Accesses a matrix array from Fortran90.
7644
7645 Synopsis:
7646 MatSeqAIJGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
7647
7648 Not collective
7649
7650 Input Parameter:
7651. x - matrix
7652
7653 Output Parameters:
7654+ xx_v - the Fortran90 pointer to the array
7655- ierr - error code
7656
7657 Example of Usage:
7658.vb
7659 PetscScalar, pointer :: xx_v(:)
7660 ....
7661 call MatSeqAIJGetArrayF90(x,xx_v,ierr)
7662 a = xx_v(3)
7663 call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
7664.ve
7665
7666 Level: advanced
7667
7668.seealso: MatSeqAIJRestoreArrayF90(), MatSeqAIJGetArray(), MatSeqAIJRestoreArray(), MatDenseGetArrayF90()
7669
7670M*/
7671
7672/*MC
7673 MatSeqAIJRestoreArrayF90 - Restores a matrix array that has been
7674 accessed with MatSeqAIJGetArrayF90().
7675
7676 Synopsis:
7677 MatSeqAIJRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
7678
7679 Not collective
7680
7681 Input Parameters:
7682+ x - matrix
7683- xx_v - the Fortran90 pointer to the array
7684
7685 Output Parameter:
7686. ierr - error code
7687
7688 Example of Usage:
7689.vb
7690 PetscScalar, pointer :: xx_v(:)
7691 ....
7692 call MatSeqAIJGetArrayF90(x,xx_v,ierr)
7693 a = xx_v(3)
7694 call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
7695.ve
7696
7697 Level: advanced
7698
7699.seealso: MatSeqAIJGetArrayF90(), MatSeqAIJGetArray(), MatSeqAIJRestoreArray(), MatDenseRestoreArrayF90()
7700
7701M*/
7702
7703
7704/*@
7705 MatCreateSubMatrix - Gets a single submatrix on the same number of processors
7706 as the original matrix.
7707
7708 Collective on Mat
7709
7710 Input Parameters:
7711+ mat - the original matrix
7712. isrow - parallel IS containing the rows this processor should obtain
7713. iscol - parallel IS containing all columns you wish to keep. Each process should list the columns that will be in its "diagonal part" in the new matrix.
7714- cll - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
7715
7716 Output Parameter:
7717. newmat - the new submatrix, of the same type as the old
7718
7719 Level: advanced
7720
7721 Notes:
7722 The submatrix can be multiplied with vectors using the same layout as iscol.
7723
7724 Some matrix types place restrictions on the row and column indices, such
7725 as that they be sorted or that they be equal to each other.
7726
7727 The index sets may not have duplicate entries.
7728
7729 The first time this is called you should use a cll of MAT_INITIAL_MATRIX;
7730 the MatCreateSubMatrix() routine will create newmat for you. Any additional calls
7731 to this routine with a mat of the same nonzero structure and with a cll of MAT_REUSE_MATRIX
7732 will reuse the matrix generated the first time. You should call MatDestroy() on newmat when
7733 you are finished using it.
7734
7735 The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
7736 the input matrix.
7737
7738 If iscol is NULL then all columns are obtained (not supported in Fortran).
7739
7740 Example usage:
7741 Consider the following 8x8 matrix with 34 non-zero values, which is
7742 assembled across 3 processors. Let's assume that proc0 owns 3 rows,
7743 proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
7744 as follows:
7745
7746.vb
7747 1 2 0 | 0 3 0 | 0 4
7748 Proc0 0 5 6 | 7 0 0 | 8 0
7749 9 0 10 | 11 0 0 | 12 0
7750 -------------------------------------
7751 13 0 14 | 15 16 17 | 0 0
7752 Proc1 0 18 0 | 19 20 21 | 0 0
7753 0 0 0 | 22 23 0 | 24 0
7754 -------------------------------------
7755 Proc2 25 26 27 | 0 0 28 | 29 0
7756 30 0 0 | 31 32 33 | 0 34
7757.ve
7758
7759 Suppose isrow = [0 1 | 4 | 6 7] and iscol = [1 2 | 3 4 5 | 6]. The resulting submatrix is
7760
7761.vb
7762 2 0 | 0 3 0 | 0
7763 Proc0 5 6 | 7 0 0 | 8
7764 -------------------------------
7765 Proc1 18 0 | 19 20 21 | 0
7766 -------------------------------
7767 Proc2 26 27 | 0 0 28 | 29
7768 0 0 | 31 32 33 | 0
7769.ve
7770
7771
7772.seealso: MatCreateSubMatrices()
7773@*/
7774PetscErrorCode MatCreateSubMatrix(Mat mat,IS isrow,IS iscol,MatReuse cll,Mat *newmat)
7775{
7776 PetscErrorCode ierr;
7777 PetscMPIInt size;
7778 Mat *local;
7779 IS iscoltmp;
7780
7781 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7781; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7782 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7782,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7782,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7782,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7782,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7783 PetscValidHeaderSpecific(isrow,IS_CLASSID,2)do { if (!isrow) return PetscError(((MPI_Comm)0x44000001),7783
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",2); if (
!PetscCheckPointer(isrow,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7783,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(isrow))->classid != IS_CLASSID) { if
(((PetscObject)(isrow))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),7783,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),7783,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
7784 if (iscol) PetscValidHeaderSpecific(iscol,IS_CLASSID,3)do { if (!iscol) return PetscError(((MPI_Comm)0x44000001),7784
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(iscol,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7784,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(iscol))->classid != IS_CLASSID) { if
(((PetscObject)(iscol))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),7784,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),7784,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
7785 PetscValidPointer(newmat,5)do { if (!newmat) return PetscError(((MPI_Comm)0x44000001),7785
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",5); if
(!PetscCheckPointer(newmat,PETSC_CHAR)) return PetscError(((
MPI_Comm)0x44000001),7785,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",5);
} while (0)
;
7786 if (cll == MAT_REUSE_MATRIX) PetscValidHeaderSpecific(*newmat,MAT_CLASSID,5)do { if (!*newmat) return PetscError(((MPI_Comm)0x44000001),7786
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",5); if (
!PetscCheckPointer(*newmat,PETSC_OBJECT)) return PetscError((
(MPI_Comm)0x44000001),7786,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,5); if (((PetscObject)(*newmat))->classid != MAT_CLASSID)
{ if (((PetscObject)(*newmat))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),7786,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,5); else return PetscError(((MPI_Comm)0x44000001),7786,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",5); } } while (0)
;
7787 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7787,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7788 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),7788,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
7789 if (cll == MAT_IGNORE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Cannot use MAT_IGNORE_MATRIX")return PetscError(PetscObjectComm((PetscObject)mat),7789,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Cannot use MAT_IGNORE_MATRIX")
;
7790
7791 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),7791,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
7792 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7792,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7793
7794 if (!iscol || isrow == iscol) {
7795 PetscBool stride;
7796 PetscMPIInt grabentirematrix = 0,grab;
7797 ierr = PetscObjectTypeCompare((PetscObject)isrow,ISSTRIDE"stride",&stride);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7797,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7798 if (stride) {
7799 PetscInt first,step,n,rstart,rend;
7800 ierr = ISStrideGetInfo(isrow,&first,&step);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7800,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7801 if (step == 1) {
7802 ierr = MatGetOwnershipRange(mat,&rstart,&rend);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7802,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7803 if (rstart == first) {
7804 ierr = ISGetLocalSize(isrow,&n);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7804,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7805 if (n == rend-rstart) {
7806 grabentirematrix = 1;
7807 }
7808 }
7809 }
7810 }
7811 ierr = MPIU_Allreduce(&grabentirematrix,&grab,1,MPI_INT,MPI_MIN,PetscObjectComm((PetscObject)mat))(PetscAllreduceBarrierCheck(PetscObjectComm((PetscObject)mat)
,1,7811,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((petsc_allreduce_ct += PetscMPIParallelComm((PetscObjectComm
((PetscObject)mat))),0) || MPI_Allreduce((&grabentirematrix
),(&grab),(1),(((MPI_Datatype)0x4c000405)),((MPI_Op)(0x58000002
)),(PetscObjectComm((PetscObject)mat)))))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7811,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7812 if (grab) {
7813 ierr = PetscInfo(mat,"Getting entire matrix as submatrix\n")PetscInfo_Private(__func__,mat,"Getting entire matrix as submatrix\n"
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7813,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7814 if (cll == MAT_INITIAL_MATRIX) {
7815 *newmat = mat;
7816 ierr = PetscObjectReference((PetscObject)mat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7816,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7817 }
7818 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7819 }
7820 }
7821
7822 if (!iscol) {
7823 ierr = ISCreateStride(PetscObjectComm((PetscObject)mat),mat->cmap->n,mat->cmap->rstart,1,&iscoltmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7823,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7824 } else {
7825 iscoltmp = iscol;
7826 }
7827
7828 /* if original matrix is on just one processor then use submatrix generated */
7829 if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
7830 ierr = MatCreateSubMatrices(mat,1,&isrow,&iscoltmp,MAT_REUSE_MATRIX,&newmat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7830,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7831 goto setproperties;
7832 } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
7833 ierr = MatCreateSubMatrices(mat,1,&isrow,&iscoltmp,MAT_INITIAL_MATRIX,&local);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7833,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7834 *newmat = *local;
7835 ierr = PetscFree(local)((*PetscTrFree)((void*)(local),7835,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
) || ((local) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7835,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7836 goto setproperties;
7837 } else if (!mat->ops->createsubmatrix) {
7838 /* Create a new matrix type that implements the operation using the full matrix */
7839 ierr = PetscLogEventBegin(MAT_CreateSubMat,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_CreateSubMat].active) ? (*PetscLogPLB)((MAT_CreateSubMat
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7839,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7840 switch (cll) {
7841 case MAT_INITIAL_MATRIX:
7842 ierr = MatCreateSubMatrixVirtual(mat,isrow,iscoltmp,newmat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7842,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7843 break;
7844 case MAT_REUSE_MATRIX:
7845 ierr = MatSubMatrixVirtualUpdate(*newmat,mat,isrow,iscoltmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7845,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7846 break;
7847 default: SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX")return PetscError(PetscObjectComm((PetscObject)mat),7847,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",63,PETSC_ERROR_INITIAL
,"Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX"
)
;
7848 }
7849 ierr = PetscLogEventEnd(MAT_CreateSubMat,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_CreateSubMat].active) ? (*PetscLogPLE)((MAT_CreateSubMat
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7849,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7850 goto setproperties;
7851 }
7852
7853 if (!mat->ops->createsubmatrix) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name)return PetscError(((MPI_Comm)0x44000001),7853,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,56,PETSC_ERROR_INITIAL,"Mat type %s",((PetscObject)mat)->
type_name)
;
7854 ierr = PetscLogEventBegin(MAT_CreateSubMat,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_CreateSubMat].active) ? (*PetscLogPLB)((MAT_CreateSubMat
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7854,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7855 ierr = (*mat->ops->createsubmatrix)(mat,isrow,iscoltmp,cll,newmat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7855,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7856 ierr = PetscLogEventEnd(MAT_CreateSubMat,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_CreateSubMat].active) ? (*PetscLogPLE)((MAT_CreateSubMat
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7856,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7857
7858 /* Propagate symmetry information for diagonal blocks */
7859setproperties:
7860 if (isrow == iscoltmp) {
7861 if (mat->symmetric_set && mat->symmetric) {
7862 ierr = MatSetOption(*newmat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7862,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7863 }
7864 if (mat->structurally_symmetric_set && mat->structurally_symmetric) {
7865 ierr = MatSetOption(*newmat,MAT_STRUCTURALLY_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7865,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7866 }
7867 if (mat->hermitian_set && mat->hermitian) {
7868 ierr = MatSetOption(*newmat,MAT_HERMITIAN,PETSC_TRUE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7868,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7869 }
7870 if (mat->spd_set && mat->spd) {
7871 ierr = MatSetOption(*newmat,MAT_SPD,PETSC_TRUE);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7871,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7872 }
7873 }
7874
7875 if (!iscol) {ierr = ISDestroy(&iscoltmp);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7875,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
7876 if (*newmat && cll == MAT_INITIAL_MATRIX) {ierr = PetscObjectStateIncrease((PetscObject)*newmat)(((PetscObject)*newmat)->state++,0);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7876,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
7877 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7878}
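A sketch of the MAT_INITIAL_MATRIX / MAT_REUSE_MATRIX protocol described above (assuming an assembled matrix A; iscol = NULL selects all columns, which is not available from Fortran). The stride keeps every second locally owned row so that a proper submatrix, not the whole matrix, is extracted:

   PetscErrorCode ierr;
   PetscInt       rstart,rend;
   IS             isrow;
   Mat            sub;

   ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
   ierr = ISCreateStride(PetscObjectComm((PetscObject)A),(rend-rstart)/2,rstart,2,&isrow);CHKERRQ(ierr);
   ierr = MatCreateSubMatrix(A,isrow,NULL,MAT_INITIAL_MATRIX,&sub);CHKERRQ(ierr);
   /* ... values of A change, nonzero structure unchanged ... */
   ierr = MatCreateSubMatrix(A,isrow,NULL,MAT_REUSE_MATRIX,&sub);CHKERRQ(ierr);
   ierr = MatDestroy(&sub);CHKERRQ(ierr);
   ierr = ISDestroy(&isrow);CHKERRQ(ierr);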
7879
7880/*@
7881 MatStashSetInitialSize - sets the sizes of the matrix stash, which is
7882 used during the assembly process to store values that belong to
7883 other processors.
7884
7885 Not Collective
7886
7887 Input Parameters:
7888+ mat - the matrix
7889. size - the initial size of the stash.
7890- bsize - the initial size of the block-stash (if used).
7891
7892 Options Database Keys:
7893+ -matstash_initial_size <size> or <size0,size1,...sizep-1>
7894- -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1>
7895
7896 Level: intermediate
7897
7898 Notes:
7899 The block-stash is used for values set with MatSetValuesBlocked() while
7900 the stash is used for values set with MatSetValues()
7901
7902 Run with the option -info and look for output of the form
7903 MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
7904 to determine the appropriate value, MM, to use for size, and
7905 MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
7906 to determine the value, BMM, to use for bsize
7907
7908
7909.seealso: MatAssemblyBegin(), MatAssemblyEnd(), Mat, MatStashGetInfo()
7910
7911@*/
7912PetscErrorCode MatStashSetInitialSize(Mat mat,PetscInt size, PetscInt bsize)
7913{
7914 PetscErrorCode ierr;
7915
7916 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 7916; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
7917 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),7917,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),7917,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),7917,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),7917,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
7918 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),7918,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
7919 ierr = MatStashSetInitialSize_Private(&mat->stash,size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7919,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7920 ierr = MatStashSetInitialSize_Private(&mat->bstash,bsize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),7920,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
7921 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
7922}
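For example (hypothetical sizes, determined from the -info output as suggested in the notes above), the stash can be pre-sized before any off-process entries are generated:

   PetscErrorCode ierr;

   ierr = MatStashSetInitialSize(A,10000,100);CHKERRQ(ierr);  /* call before MatSetValues()/MatAssemblyBegin() */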
7923
7924/*@
7925 MatInterpolateAdd - w = y + A*x or A'*x depending on the shape of
7926 the matrix
7927
7928 Neighbor-wise Collective on Mat
7929
7930 Input Parameters:
7931+ mat - the matrix
7932. x,y - the vectors
7933- w - where the result is stored
7934
7935 Level: intermediate
7936
7937 Notes:
7938 w may be the same vector as y.
7939
7940 This allows one to use either the restriction or interpolation (its transpose)
7941 matrix to do the interpolation
7942
7943.seealso: MatMultAdd(), MatMultTransposeAdd(), MatRestrict()
7944
7945@*/
7946PetscErrorCode MatInterpolateAdd(Mat A,Vec x,Vec y,Vec w)
7947{
7948 PetscErrorCode ierr;
7949 PetscInt M,N,Ny;
7950
7951 PetscFunctionBegin;
7952 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
7953 PetscValidHeaderSpecific(x,VEC_CLASSID,2);
7954 PetscValidHeaderSpecific(y,VEC_CLASSID,3);
7955 PetscValidHeaderSpecific(w,VEC_CLASSID,4);
7956 PetscValidType(A,1);
7957 MatCheckPreallocated(A,1);
7958 ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
7959 ierr = VecGetSize(y,&Ny);CHKERRQ(ierr);
7960 if (M == Ny) {
7961   ierr = MatMultAdd(A,x,y,w);CHKERRQ(ierr);
7962 } else {
7963   ierr = MatMultTransposeAdd(A,x,y,w);CHKERRQ(ierr);
7964 }
7965 PetscFunctionReturn(0);
7966}
7967
7968/*@
7969 MatInterpolate - y = A*x or A'*x depending on the shape of
7970 the matrix
7971
7972 Neighbor-wise Collective on Mat
7973
7974 Input Parameters:
7975+ A - the matrix
7976- x,y - the vectors
7977
7978 Level: intermediate
7979
7980 Notes:
7981 This allows one to use either the restriction or interpolation (its transpose)
7982 matrix to do the interpolation
7983
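 Example of Usage (a sketch; P, xc, and xf are hypothetical: an interpolation
 matrix and already-created coarse/fine vectors):
.vb
 MatInterpolate(P,xc,xf);   /* xf = P*xc, or P'*xc if the vector sizes dictate the transpose */
.ve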
7984.seealso: MatMultAdd(), MatMultTransposeAdd(), MatRestrict()
7985
7986@*/
7987PetscErrorCode MatInterpolate(Mat A,Vec x,Vec y)
7988{
7989 PetscErrorCode ierr;
7990 PetscInt M,N,Ny;
7991
7992 PetscFunctionBegin;
7993 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
7994 PetscValidHeaderSpecific(x,VEC_CLASSID,2);
7995 PetscValidHeaderSpecific(y,VEC_CLASSID,3);
7996 PetscValidType(A,1);
7997 MatCheckPreallocated(A,1);
7998 ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
7999 ierr = VecGetSize(y,&Ny);CHKERRQ(ierr);
8000 if (M == Ny) {
8001   ierr = MatMult(A,x,y);CHKERRQ(ierr);
8002 } else {
8003   ierr = MatMultTranspose(A,x,y);CHKERRQ(ierr);
8004 }
8005 PetscFunctionReturn(0);
8006}
8007
8008/*@
8009 MatRestrict - y = A*x or A'*x
8010
8011 Neighbor-wise Collective on Mat
8012
8013 Input Parameters:
8014+ A - the matrix
8015- x,y - the vectors
8016
8017 Level: intermediate
8018
8019 Notes:
8020 This allows one to use either the restriction or interpolation (its transpose)
8021 matrix to do the restriction
8022
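 Example of Usage (a sketch; R, xf, and xc are hypothetical: a restriction
 matrix and already-created fine/coarse vectors):
.vb
 MatRestrict(R,xf,xc);   /* xc = R*xf, or R'*xf if the vector sizes dictate the transpose */
.ve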
8023.seealso: MatMultAdd(), MatMultTransposeAdd(), MatInterpolate()
8024
8025@*/
8026PetscErrorCode MatRestrict(Mat A,Vec x,Vec y)
8027{
8028 PetscErrorCode ierr;
8029 PetscInt M,N,Ny;
8030
8031 PetscFunctionBegin;
8032 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8033 PetscValidHeaderSpecific(x,VEC_CLASSID,2);
8034 PetscValidHeaderSpecific(y,VEC_CLASSID,3);
8035 PetscValidType(A,1);
8036 MatCheckPreallocated(A,1);
8037
8038 ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
8039 ierr = VecGetSize(y,&Ny);CHKERRQ(ierr);
8040 if (M == Ny) {
8041   ierr = MatMult(A,x,y);CHKERRQ(ierr);
8042 } else {
8043   ierr = MatMultTranspose(A,x,y);CHKERRQ(ierr);
8044 }
8045 PetscFunctionReturn(0);
8046}
8047
8048/*@
8049 MatGetNullSpace - retrieves the null space of a matrix.
8050
8051 Logically Collective on Mat
8052
8053 Input Parameter:
8054. mat - the matrix
8055 Output Parameter:
8056. nullsp - the null space object, or NULL if none has been attached
8057 Level: developer
8058
8059.seealso: MatCreate(), MatNullSpaceCreate(), MatSetNearNullSpace(), MatSetNullSpace()
8060@*/
8061PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
8062{
8063 PetscFunctionBegin;
8064 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8065 PetscValidPointer(nullsp,2);
8066 *nullsp = (mat->symmetric_set && mat->symmetric && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
8067 PetscFunctionReturn(0);
8068}
8069
8070/*@
8071 MatSetNullSpace - attaches a null space to a matrix.
8072
8073 Logically Collective on Mat
8074
8075 Input Parameters:
8076+ mat - the matrix
8077- nullsp - the null space object
8078
8079 Level: advanced
8080
8081 Notes:
8082 This null space is used by the linear solvers. Overwrites any previous null space that may have been attached
8083
8084 For inconsistent singular systems (linear systems where the right hand side is not in the range of the operator) you also likely should
8085 call MatSetTransposeNullSpace(). This allows the linear system to be solved in a least squares sense.
8086
8087 You can remove the null space by calling this routine with a nullsp of NULL
8088
8089
8090 The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
8091 for a matrix A mapping R^n to R^m (m rows, n columns), R^n is the direct sum of the null space of A, n(A), and the range of A^T, R(A^T);
8092 similarly, R^m is the direct sum of n(A^T) and R(A). Hence the linear system A x = b has a solution only if b is in R(A) (equivalently, if b is orthogonal to
8093 n(A^T)), and if x is a solution then x + alpha n(A) is also a solution for any alpha. The minimum norm solution is orthogonal to n(A). For problems without a solution,
8094 the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving A x = \hat{b}, where \hat{b} is b orthogonalized against n(A^T).
8095
8096 Krylov solvers can produce the minimal norm solution to the least squares problem by utilizing MatNullSpaceRemove().
8097
8098 If the matrix is known to be symmetric, because it is an SBAIJ matrix or because one has called MatSetOption(mat,MAT_SYMMETRIC,PETSC_TRUE) (or MAT_SYMMETRIC_ETERNAL), this
8099 routine also automatically calls MatSetTransposeNullSpace().
8100
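 Example of Usage (a sketch for a singular Poisson-type operator A whose null space is
 spanned by the constant vector; A is assumed already created and assembled):
.vb
 MatNullSpace nullsp;
 MatNullSpaceCreate(PetscObjectComm((PetscObject)A),PETSC_TRUE,0,NULL,&nullsp);
 MatSetNullSpace(A,nullsp);
 MatNullSpaceDestroy(&nullsp);   /* the matrix keeps its own reference */
.ve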
8101.seealso: MatCreate(), MatNullSpaceCreate(), MatSetNearNullSpace(), MatGetNullSpace(), MatSetTransposeNullSpace(), MatGetTransposeNullSpace(), MatNullSpaceRemove()
8102@*/
8103PetscErrorCode MatSetNullSpace(Mat mat,MatNullSpace nullsp)
8104{
8105 PetscErrorCode ierr;
8106
8107 PetscFunctionBegin;
8108 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8109 if (nullsp) PetscValidHeaderSpecific(nullsp,MAT_NULLSPACE_CLASSID,2);
8110 if (nullsp) {ierr = PetscObjectReference((PetscObject)nullsp);CHKERRQ(ierr);}
8111 ierr = MatNullSpaceDestroy(&mat->nullsp);CHKERRQ(ierr);
8112 mat->nullsp = nullsp;
8113 if (mat->symmetric_set && mat->symmetric) {
8114   ierr = MatSetTransposeNullSpace(mat,nullsp);CHKERRQ(ierr);
8115 }
8116 PetscFunctionReturn(0);
8117}
8118
8119/*@
8120 MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
8121
8122 Logically Collective on Mat
8123
8124 Input Parameter:
8125. mat - the matrix
8126 Output Parameter:
8127. nullsp - the null space object, or NULL if none has been attached
8128 Level: developer
8129
8130.seealso: MatCreate(), MatNullSpaceCreate(), MatSetNearNullSpace(), MatSetTransposeNullSpace(), MatSetNullSpace(), MatGetNullSpace()
8131@*/
8132PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
8133{
8134 PetscFunctionBegin;
8135 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8136 PetscValidType(mat,1);
8137 PetscValidPointer(nullsp,2);
8138 *nullsp = (mat->symmetric_set && mat->symmetric && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
8139 PetscFunctionReturn(0);
8140}
8141
8142/*@
8143 MatSetTransposeNullSpace - attaches a null space to a matrix.
8144
8145 Logically Collective on Mat
8146
8147 Input Parameters:
8148+ mat - the matrix
8149- nullsp - the null space object
8150
8151 Level: advanced
8152
8153 Notes:
8154 For inconsistent singular systems (linear systems where the right hand side is not in the range of the operator) this allows the linear system to be solved in a least squares sense.
8155 You must also call MatSetNullSpace().
8156
8157
8158 The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
8159 for a matrix A mapping R^n to R^m (m rows, n columns), R^n is the direct sum of the null space of A, n(A), and the range of A^T, R(A^T);
8160 similarly, R^m is the direct sum of n(A^T) and R(A). Hence the linear system A x = b has a solution only if b is in R(A) (equivalently, if b is orthogonal to
8161 n(A^T)), and if x is a solution then x + alpha n(A) is also a solution for any alpha. The minimum norm solution is orthogonal to n(A). For problems without a solution,
8162 the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving A x = \hat{b}, where \hat{b} is b orthogonalized against n(A^T).
8163
8164 Krylov solvers can produce the minimal norm solution to the least squares problem by utilizing MatNullSpaceRemove().
8165
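 Example of Usage (a sketch; the matrix A and a vector vt spanning n(A') are hypothetical
 and assumed already created; remember to also call MatSetNullSpace()):
.vb
 MatNullSpace tnullsp;
 MatNullSpaceCreate(PetscObjectComm((PetscObject)A),PETSC_FALSE,1,&vt,&tnullsp);
 MatSetTransposeNullSpace(A,tnullsp);
 MatNullSpaceDestroy(&tnullsp);
.ve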
8166.seealso: MatCreate(), MatNullSpaceCreate(), MatSetNearNullSpace(), MatGetNullSpace(), MatSetNullSpace(), MatGetTransposeNullSpace(), MatNullSpaceRemove()
8167@*/
8168PetscErrorCode MatSetTransposeNullSpace(Mat mat,MatNullSpace nullsp)
8169{
8170 PetscErrorCode ierr;
8171
8172 PetscFunctionBegin;
8173 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8174 if (nullsp) PetscValidHeaderSpecific(nullsp,MAT_NULLSPACE_CLASSID,2);
8175 if (nullsp) {ierr = PetscObjectReference((PetscObject)nullsp);CHKERRQ(ierr);}
8176 ierr = MatNullSpaceDestroy(&mat->transnullsp);CHKERRQ(ierr);
8177 mat->transnullsp = nullsp;
8178 PetscFunctionReturn(0);
8179}
8180
8181/*@
8182 MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions.
8183 This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
8184
8185 Logically Collective on Mat
8186
8187 Input Parameters:
8188+ mat - the matrix
8189- nullsp - the null space object
8190
8191 Level: advanced
8192
8193 Notes:
8194 Overwrites any previous near null space that may have been attached
8195
8196 You can remove the null space by calling this routine with a nullsp of NULL
8197
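 Example of Usage (a sketch for 3D elasticity; coords is a hypothetical vector of nodal
 coordinates with block size 3, assumed already created):
.vb
 MatNullSpace nearnull;
 MatNullSpaceCreateRigidBody(coords,&nearnull);
 MatSetNearNullSpace(A,nearnull);
 MatNullSpaceDestroy(&nearnull);
.ve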
8198.seealso: MatCreate(), MatNullSpaceCreate(), MatSetNullSpace(), MatNullSpaceCreateRigidBody(), MatGetNearNullSpace()
8199@*/
8200PetscErrorCode MatSetNearNullSpace(Mat mat,MatNullSpace nullsp)
8201{
8202 PetscErrorCode ierr;
8203
8204 PetscFunctionBegin;
8205 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8206 PetscValidType(mat,1);
8207 if (nullsp) PetscValidHeaderSpecific(nullsp,MAT_NULLSPACE_CLASSID,2);
8208 MatCheckPreallocated(mat,1);
8209 if (nullsp) {ierr = PetscObjectReference((PetscObject)nullsp);CHKERRQ(ierr);}
8210 ierr = MatNullSpaceDestroy(&mat->nearnullsp);CHKERRQ(ierr);
8211 mat->nearnullsp = nullsp;
8212 PetscFunctionReturn(0);
8213}
8214
8215/*@
8216 MatGetNearNullSpace - Gets the null space attached with MatSetNearNullSpace()
8217
8218 Not Collective
8219
8220 Input Parameters:
8221. mat - the matrix
8222
8223 Output Parameters:
8224. nullsp - the null space object, NULL if not set
8225
8226 Level: developer
8227
8228.seealso: MatSetNearNullSpace(), MatGetNullSpace(), MatNullSpaceCreate()
8229@*/
8230PetscErrorCode MatGetNearNullSpace(Mat mat,MatNullSpace *nullsp)
8231{
8232 PetscFunctionBegin;
8233 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8234 PetscValidType(mat,1);
8235 PetscValidPointer(nullsp,2);
8236 MatCheckPreallocated(mat,1);
8237 *nullsp = mat->nearnullsp;
8238 PetscFunctionReturn(0);
8239}
8240
8241/*@C
8242 MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
8243
8244 Collective on Mat
8245
8246 Input Parameters:
8247+ mat - the matrix
8248. row - row/column permutation
8249- info - information on the desired factorization process, including the expected
8250 fill factor (>= 1.0) and the level of fill, for ICC(k)
8251
8252 Notes:
8253 Probably really in-place only when level of fill is zero, otherwise allocates
8254 new space to store factored matrix and deletes previous memory.
8255
8256 Most users should employ the simplified KSP interface for linear solvers
8257 instead of working directly with matrix algebra routines such as this.
8258 See, e.g., KSPCreate().
8259
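 Example of Usage (a sketch; mat is assumed symmetric and of a type supporting in-place
 ICC, and perm is a hypothetical ordering IS):
.vb
 MatFactorInfo info;
 MatFactorInfoInitialize(&info);
 info.fill   = 1.0;   /* expected fill factor */
 info.levels = 0;     /* ICC(0) */
 MatICCFactor(mat,perm,&info);
.ve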
8260 Level: developer
8261
8262
8263.seealso: MatICCFactorSymbolic(), MatLUFactorNumeric(), MatCholeskyFactor()
8264
8265 Developer Note: fortran interface is not autogenerated as the f90
8266 interface definition cannot be generated correctly [due to MatFactorInfo]
8267
8268@*/
8269PetscErrorCode MatICCFactor(Mat mat,IS row,const MatFactorInfo *info)
8270{
8271 PetscErrorCode ierr;
8272
8273 PetscFunctionBegin;
8274 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8275 PetscValidType(mat,1);
8276 if (row) PetscValidHeaderSpecific(row,IS_CLASSID,2);
8277 PetscValidPointer(info,3);
8278 if (mat->rmap->N != mat->cmap->N) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONG,"matrix must be square");
8279 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
8280 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
8281 if (!mat->ops->iccfactor) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
8282 MatCheckPreallocated(mat,1);
8283 ierr = (*mat->ops->iccfactor)(mat,row,info);CHKERRQ(ierr);
8284 ierr = PetscObjectStateIncrease((PetscObject)mat);CHKERRQ(ierr);
8285 PetscFunctionReturn(0);
8286}
8287
8288/*@
8289 MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
8290 ghosted ones.
8291
8292 Not Collective
8293
8294 Input Parameters:
8295+ mat - the matrix
8296- diag - the diagonal values, including ghost ones
8297
8298 Level: developer
8299
8300 Notes:
8301 Works only for MPIAIJ and MPIBAIJ matrices
8302
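 Example of Usage (a sketch; mat is a hypothetical MPIAIJ matrix and ldiag a sequential
 vector holding the scaling values for the local and ghosted columns):
.vb
 MatDiagonalScaleLocal(mat,ldiag);
.ve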
8303.seealso: MatDiagonalScale()
8304@*/
8305PetscErrorCode MatDiagonalScaleLocal(Mat mat,Vec diag)
8306{
8307 PetscErrorCode ierr;
8308 PetscMPIInt size;
8309
8310 PetscFunctionBegin;
8311 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8312 PetscValidHeaderSpecific(diag,VEC_CLASSID,2);
8313 PetscValidType(mat,1);
8314
8315 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be already assembled");
8316 ierr = PetscLogEventBegin(MAT_Scale,mat,0,0,0);CHKERRQ(ierr);
8317 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
8318 if (size == 1) {
8319   PetscInt n,m;
8320   ierr = VecGetSize(diag,&n);CHKERRQ(ierr);
8321   ierr = MatGetSize(mat,0,&m);CHKERRQ(ierr);
8322   if (m == n) {
8323     ierr = MatDiagonalScale(mat,0,diag);CHKERRQ(ierr);
8324   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only supported for sequential matrices when no ghost points/periodic conditions");
8325 } else {
8326   ierr = PetscUseMethod(mat,"MatDiagonalScaleLocal_C",(Mat,Vec),(mat,diag));CHKERRQ(ierr);
8327 }
8328 ierr = PetscLogEventEnd(MAT_Scale,mat,0,0,0);CHKERRQ(ierr);
8329 ierr = PetscObjectStateIncrease((PetscObject)mat);CHKERRQ(ierr);
8330 PetscFunctionReturn(0);
8331}
8332
8333/*@
8334 MatGetInertia - Gets the inertia from a factored matrix
8335
8336 Collective on Mat
8337
8338 Input Parameter:
8339. mat - the matrix
8340
8341 Output Parameters:
8342+ nneg - number of negative eigenvalues
8343. nzero - number of zero eigenvalues
8344- npos - number of positive eigenvalues
8345
8346 Level: advanced
8347
8348 Notes:
8349 Matrix must have been factored by MatCholeskyFactor()
8350
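 Example of Usage (a sketch; mat is assumed symmetric and factored in place, with perm
 and info as hypothetical ordering and factorization options):
.vb
 PetscInt nneg,nzero,npos;
 MatCholeskyFactor(mat,perm,&info);
 MatGetInertia(mat,&nneg,&nzero,&npos);
.ve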
8351
8352@*/
8353PetscErrorCode MatGetInertia(Mat mat,PetscInt *nneg,PetscInt *nzero,PetscInt *npos)
8354{
8355 PetscErrorCode ierr;
8356
8357  PetscFunctionBegin;
8358  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8359  PetscValidType(mat,1);
8360  if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix");
8361  if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Numeric factor mat is not assembled");
8362  if (!mat->ops->getinertia) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
8363  ierr = (*mat->ops->getinertia)(mat,nneg,nzero,npos);CHKERRQ(ierr);
8364  PetscFunctionReturn(0);
8365}
8366
8367/* ----------------------------------------------------------------*/
8368/*@C
8369 MatSolves - Solves A x = b, given a factored matrix, for a collection of vectors
8370
8371   Neighbor-wise Collective on Mat
8372
8373 Input Parameters:
8374+ mat - the factored matrix
8375- b - the right-hand-side vectors
8376
8377 Output Parameter:
8378. x - the result vectors
8379
8380   Notes:
8381   The vectors b and x cannot be the same; i.e., one cannot
8382   call MatSolves(A,x,x).
8383
8384
8385 Most users should employ the simplified KSP interface for linear solvers
8386 instead of working directly with matrix algebra routines such as this.
8387 See, e.g., KSPCreate().
8388
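   For instance, a single linear solve through KSP (a minimal sketch; assumes A and b are assembled and x was created, e.g. with MatCreateVecs()):
.vb
   KSP ksp;
   KSPCreate(PETSC_COMM_WORLD,&ksp);
   KSPSetOperators(ksp,A,A);
   KSPSetFromOptions(ksp);
   KSPSolve(ksp,b,x);
   KSPDestroy(&ksp);
.ve
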
8389 Level: developer
8390
8391.seealso: MatSolveAdd(), MatSolveTranspose(), MatSolveTransposeAdd(), MatSolve()
8392@*/
8393PetscErrorCode MatSolves(Mat mat,Vecs b,Vecs x)
8394{
8395 PetscErrorCode ierr;
8396
8397  PetscFunctionBegin;
8398  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8399  PetscValidType(mat,1);
8400  if (x == b) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors");
8401  if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix");
8402  if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0);
8403
8404  if (!mat->ops->solves) SETERRQ1(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
8405  MatCheckPreallocated(mat,1);
8406  ierr = PetscLogEventBegin(MAT_Solves,mat,0,0,0);CHKERRQ(ierr);
8407  ierr = (*mat->ops->solves)(mat,b,x);CHKERRQ(ierr);
8408  ierr = PetscLogEventEnd(MAT_Solves,mat,0,0,0);CHKERRQ(ierr);
8409  PetscFunctionReturn(0);
8410}
8411
8412/*@
8413 MatIsSymmetric - Test whether a matrix is symmetric
8414
8415 Collective on Mat
8416
8417   Input Parameters:
8418+  A - the matrix to test
8419-  tol - entries whose difference from the corresponding transposed entry is below this tolerance count as equal (use 0.0 to require exact symmetry)
8420
8421   Output Parameter:
8422.  flg - the result
8423
8424 Notes:
8425    For real matrices, MatIsSymmetric() and MatIsHermitian() return identical results
8426
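   Example of Usage:
   A minimal sketch; assumes A has been assembled:
.vb
   PetscBool flg;
   MatIsSymmetric(A,1.e-12,&flg);
   PetscPrintf(PETSC_COMM_WORLD,"A is %ssymmetric\n",flg ? "" : "not ");
.ve
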
8427 Level: intermediate
8428
8429.seealso: MatTranspose(), MatIsTranspose(), MatIsHermitian(), MatIsStructurallySymmetric(), MatSetOption(), MatIsSymmetricKnown()
8430@*/
8431PetscErrorCode MatIsSymmetric(Mat A,PetscReal tol,PetscBool *flg)
8432{
8433 PetscErrorCode ierr;
8434
8435  PetscFunctionBegin;
8436  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8437  PetscValidBoolPointer(flg,2);
8438
8439 if (!A->symmetric_set) {
8440 if (!A->ops->issymmetric) {
8441 MatType mattype;
8442      ierr = MatGetType(A,&mattype);CHKERRQ(ierr);
8443      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix of type <%s> does not support checking for symmetric",mattype);
8444 }
8445    ierr = (*A->ops->issymmetric)(A,tol,flg);CHKERRQ(ierr);
8446 if (!tol) {
8447 A->symmetric_set = PETSC_TRUE;
8448 A->symmetric = *flg;
8449 if (A->symmetric) {
8450 A->structurally_symmetric_set = PETSC_TRUE;
8451 A->structurally_symmetric = PETSC_TRUE;
8452 }
8453 }
8454 } else if (A->symmetric) {
8455 *flg = PETSC_TRUE;
8456 } else if (!tol) {
8457 *flg = PETSC_FALSE;
8458 } else {
8459 if (!A->ops->issymmetric) {
8460 MatType mattype;
8461      ierr = MatGetType(A,&mattype);CHKERRQ(ierr);
8462      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix of type <%s> does not support checking for symmetric",mattype);
8463 }
8464    ierr = (*A->ops->issymmetric)(A,tol,flg);CHKERRQ(ierr);
8465 }
8466  PetscFunctionReturn(0);
8467}
8468
8469/*@
8470 MatIsHermitian - Test whether a matrix is Hermitian
8471
8472 Collective on Mat
8473
8474   Input Parameters:
8475+  A - the matrix to test
8476-  tol - entries whose difference from the corresponding conjugate-transposed entry is below this tolerance count as equal (use 0.0 to require an exactly Hermitian matrix)
8477
8478   Output Parameter:
8479.  flg - the result
8480
8481 Level: intermediate
8482
8483.seealso: MatTranspose(), MatIsTranspose(), MatIsHermitianKnown(), MatIsStructurallySymmetric(), MatSetOption(),
8484 MatIsSymmetricKnown(), MatIsSymmetric()
8485@*/
8486PetscErrorCode MatIsHermitian(Mat A,PetscReal tol,PetscBool *flg)
8487{
8488 PetscErrorCode ierr;
8489
8490  PetscFunctionBegin;
8491  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8492  PetscValidBoolPointer(flg,2);
8493
8494 if (!A->hermitian_set) {
8495 if (!A->ops->ishermitian) {
8496 MatType mattype;
8497      ierr = MatGetType(A,&mattype);CHKERRQ(ierr);
8498      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix of type <%s> does not support checking for hermitian",mattype);
8499 }
8500    ierr = (*A->ops->ishermitian)(A,tol,flg);CHKERRQ(ierr);
8501 if (!tol) {
8502 A->hermitian_set = PETSC_TRUE;
8503 A->hermitian = *flg;
8504 if (A->hermitian) {
8505 A->structurally_symmetric_set = PETSC_TRUE;
8506 A->structurally_symmetric = PETSC_TRUE;
8507 }
8508 }
8509 } else if (A->hermitian) {
8510 *flg = PETSC_TRUE;
8511 } else if (!tol) {
8512 *flg = PETSC_FALSE;
8513 } else {
8514 if (!A->ops->ishermitian) {
8515 MatType mattype;
8516      ierr = MatGetType(A,&mattype);CHKERRQ(ierr);
8517      SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix of type <%s> does not support checking for hermitian",mattype);
8518 }
8519    ierr = (*A->ops->ishermitian)(A,tol,flg);CHKERRQ(ierr);
8520 }
8521  PetscFunctionReturn(0);
8522}
8523
8524/*@
8525 MatIsSymmetricKnown - Checks the flag on the matrix to see if it is symmetric.
8526
8527 Not Collective
8528
8529 Input Parameter:
8530. A - the matrix to check
8531
8532 Output Parameters:
8533+ set - if the symmetric flag is set (this tells you if the next flag is valid)
8534- flg - the result
8535
8536 Level: advanced
8537
8538 Note: Does not check the matrix values directly, so this may return unknown (set = PETSC_FALSE). Use MatIsSymmetric()
8539 if you want it explicitly checked
8540
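   Example of Usage:
   A minimal sketch showing the set/flg pair:
.vb
   PetscBool set,flg;
   MatIsSymmetricKnown(A,&set,&flg);
   if (set && flg) { /* symmetry has been recorded and A is symmetric */ }
.ve
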
8541.seealso: MatTranspose(), MatIsTranspose(), MatIsHermitian(), MatIsStructurallySymmetric(), MatSetOption(), MatIsSymmetric()
8542@*/
8543PetscErrorCode MatIsSymmetricKnown(Mat A,PetscBool *set,PetscBool *flg)
8544{
8545  PetscFunctionBegin;
8546  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8547  PetscValidPointer(set,2);
8548  PetscValidBoolPointer(flg,3);
8549 if (A->symmetric_set) {
8550 *set = PETSC_TRUE;
8551 *flg = A->symmetric;
8552 } else {
8553 *set = PETSC_FALSE;
8554 }
8555  PetscFunctionReturn(0);
8556}
8557
8558/*@
8559   MatIsHermitianKnown - Checks the flag on the matrix to see if it is Hermitian.
8560
8561 Not Collective
8562
8563 Input Parameter:
8564. A - the matrix to check
8565
8566 Output Parameters:
8567+  set - if the Hermitian flag is set (this tells you if the next flag is valid)
8568- flg - the result
8569
8570 Level: advanced
8571
8572 Note: Does not check the matrix values directly, so this may return unknown (set = PETSC_FALSE). Use MatIsHermitian()
8573 if you want it explicitly checked
8574
8575.seealso: MatTranspose(), MatIsTranspose(), MatIsHermitian(), MatIsStructurallySymmetric(), MatSetOption(), MatIsSymmetric()
8576@*/
8577PetscErrorCode MatIsHermitianKnown(Mat A,PetscBool *set,PetscBool *flg)
8578{
8579  PetscFunctionBegin;
8580  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8581  PetscValidPointer(set,2);
8582  PetscValidBoolPointer(flg,3);
8583 if (A->hermitian_set) {
8584 *set = PETSC_TRUE;
8585 *flg = A->hermitian;
8586 } else {
8587 *set = PETSC_FALSE;
8588 }
8589  PetscFunctionReturn(0);
8590}
8591
8592/*@
8593 MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
8594
8595 Collective on Mat
8596
8597 Input Parameter:
8598. A - the matrix to test
8599
8600   Output Parameter:
8601. flg - the result
8602
8603 Level: intermediate
8604
8605.seealso: MatTranspose(), MatIsTranspose(), MatIsHermitian(), MatIsSymmetric(), MatSetOption()
8606@*/
8607PetscErrorCode MatIsStructurallySymmetric(Mat A,PetscBool *flg)
8608{
8609 PetscErrorCode ierr;
8610
8611  PetscFunctionBegin;
8612  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8613  PetscValidBoolPointer(flg,2);
8614 if (!A->structurally_symmetric_set) {
8615    if (!A->ops->isstructurallysymmetric) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Matrix does not support checking for structural symmetric");
8616    ierr = (*A->ops->isstructurallysymmetric)(A,&A->structurally_symmetric);CHKERRQ(ierr);
8617
8618 A->structurally_symmetric_set = PETSC_TRUE;
8619 }
8620 *flg = A->structurally_symmetric;
8621  PetscFunctionReturn(0);
8622}
8623
8624/*@
8625   MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. how many need
8626     to be communicated to other processors during the MatAssemblyBegin()/MatAssemblyEnd() process
8627
8628 Not collective
8629
8630 Input Parameter:
8631.  mat - the matrix
8632
8633 Output Parameters:
8634+ nstash - the size of the stash
8635.  reallocs - the number of additional mallocs incurred in the stash
8636.  bnstash - the size of the block stash
8637-  breallocs - the number of additional mallocs incurred in the block stash
8638
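   Example of Usage:
   A minimal sketch; for instance after assembly, to report stash usage:
.vb
   PetscInt nstash,reallocs,bnstash,breallocs;
   MatStashGetInfo(mat,&nstash,&reallocs,&bnstash,&breallocs);
   PetscPrintf(PETSC_COMM_WORLD,"stash: %D values (%D mallocs), block stash: %D values (%D mallocs)\n",nstash,reallocs,bnstash,breallocs);
.ve
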
8639 Level: advanced
8640
8641.seealso: MatAssemblyBegin(), MatAssemblyEnd(), Mat, MatStashSetInitialSize()
8642
8643@*/
8644PetscErrorCode MatStashGetInfo(Mat mat,PetscInt *nstash,PetscInt *reallocs,PetscInt *bnstash,PetscInt *breallocs)
8645{
8646 PetscErrorCode ierr;
8647
8648  PetscFunctionBegin;
8649  ierr = MatStashGetInfo_Private(&mat->stash,nstash,reallocs);CHKERRQ(ierr);
8650  ierr = MatStashGetInfo_Private(&mat->bstash,bnstash,breallocs);CHKERRQ(ierr);
8651  PetscFunctionReturn(0);
8652}
8653
8654/*@C
8655 MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
8656 parallel layout
8657
8658 Collective on Mat
8659
8660 Input Parameter:
8661. mat - the matrix
8662
8663   Output Parameters:
8664+ right - (optional) vector that the matrix can be multiplied against
8665- left - (optional) vector that the matrix vector product can be stored in
8666
8667 Notes:
8668 The blocksize of the returned vectors is determined by the row and column block sizes set with MatSetBlockSizes() or the single blocksize (same for both) set by MatSetBlockSize().
8669
8670
8671    These are new vectors which are not owned by the Mat; destroy them with VecDestroy() when no longer needed
8672
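   Example of Usage:
   A minimal sketch; x matches the column layout of A and y the row layout:
.vb
   Vec x,y;
   MatCreateVecs(A,&x,&y);
   MatMult(A,x,y);
   VecDestroy(&x);
   VecDestroy(&y);
.ve
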
8673 Level: advanced
8674
8675.seealso: MatCreate(), VecDestroy()
8676@*/
8677PetscErrorCode MatCreateVecs(Mat mat,Vec *right,Vec *left)
8678{
8679 PetscErrorCode ierr;
8680
8681  PetscFunctionBegin;
8682  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8683  PetscValidType(mat,1);
8684 if (mat->ops->getvecs) {
8685    ierr = (*mat->ops->getvecs)(mat,right,left);CHKERRQ(ierr);
8686 } else {
8687 PetscInt rbs,cbs;
8688    ierr = MatGetBlockSizes(mat,&rbs,&cbs);CHKERRQ(ierr);
8689 if (right) {
8690      if (mat->cmap->n < 0) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"PetscLayout for columns not yet setup");
8691      ierr = VecCreate(PetscObjectComm((PetscObject)mat),right);CHKERRQ(ierr);
8692      ierr = VecSetSizes(*right,mat->cmap->n,PETSC_DETERMINE);CHKERRQ(ierr);
8693      ierr = VecSetBlockSize(*right,cbs);CHKERRQ(ierr);
8694      ierr = VecSetType(*right,mat->defaultvectype);CHKERRQ(ierr);
8695      ierr = PetscLayoutReference(mat->cmap,&(*right)->map);CHKERRQ(ierr);
8696 }
8697 if (left) {
8698      if (mat->rmap->n < 0) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"PetscLayout for rows not yet setup");
8699      ierr = VecCreate(PetscObjectComm((PetscObject)mat),left);CHKERRQ(ierr);
8700      ierr = VecSetSizes(*left,mat->rmap->n,PETSC_DETERMINE);CHKERRQ(ierr);
8701      ierr = VecSetBlockSize(*left,rbs);CHKERRQ(ierr);
8702      ierr = VecSetType(*left,mat->defaultvectype);CHKERRQ(ierr);
8703      ierr = PetscLayoutReference(mat->rmap,&(*left)->map);CHKERRQ(ierr);
8704 }
8705 }
8706  PetscFunctionReturn(0);
8707}
8708
8709/*@C
8710 MatFactorInfoInitialize - Initializes a MatFactorInfo data structure
8711 with default values.
8712
8713 Not Collective
8714
8715   Input Parameter:
8716. info - the MatFactorInfo data structure
8717
8718
8719 Notes:
8720 The solvers are generally used through the KSP and PC objects, for example
8721 PCLU, PCILU, PCCHOLESKY, PCICC
8722
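   Example of Usage:
   A minimal sketch; assumes row and col are orderings obtained with MatGetOrdering():
.vb
   MatFactorInfo info;
   MatFactorInfoInitialize(&info);
   info.fill = 2.0;
   MatLUFactor(A,row,col,&info);
.ve
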
8723 Level: developer
8724
8725.seealso: MatFactorInfo
8726
8727    Developer Note: the Fortran interface is not autogenerated as the f90
8728    interface definition cannot be generated correctly [due to MatFactorInfo]
8729
8730@*/
8731
8732PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
8733{
8734 PetscErrorCode ierr;
8735
8736  PetscFunctionBegin;
8737  ierr = PetscMemzero(info,sizeof(MatFactorInfo));CHKERRQ(ierr);
8738  PetscFunctionReturn(0);
8740
8741/*@
8742 MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
8743
8744 Collective on Mat
8745
8746 Input Parameters:
8747+ mat - the factored matrix
8748- is - the index set defining the Schur indices (0-based)
8749
8750 Notes:
8751 Call MatFactorSolveSchurComplement() or MatFactorSolveSchurComplementTranspose() after this call to solve a Schur complement system.
8752
8753 You can call MatFactorGetSchurComplement() or MatFactorCreateSchurComplement() after this call.
8754
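   Example of Usage:
   A minimal sketch; assumes F came from MatGetFactor() with a package that supports Schur complements (e.g. MATSOLVERMUMPS) and that the last nschur of the nloc local rows are the Schur indices:
.vb
   IS is;
   ISCreateStride(PETSC_COMM_SELF,nschur,nloc-nschur,1,&is);
   MatFactorSetSchurIS(F,is);
   ISDestroy(&is);
.ve
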
8755 Level: developer
8756
8757.seealso: MatGetFactor(), MatFactorGetSchurComplement(), MatFactorRestoreSchurComplement(), MatFactorCreateSchurComplement(), MatFactorSolveSchurComplement(),
8758           MatFactorSolveSchurComplementTranspose()
8759
8760@*/
8761PetscErrorCode MatFactorSetSchurIS(Mat mat,IS is)
8762{
8763 PetscErrorCode ierr,(*f)(Mat,IS);
8764
8765  PetscFunctionBegin;
8766  PetscValidType(mat,1);
8767  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8768  PetscValidType(is,2);
8769  PetscValidHeaderSpecific(is,IS_CLASSID,2);
8770  PetscCheckSameComm(mat,1,is,2);
8771  if (!mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
8772  ierr = PetscObjectQueryFunction((PetscObject)mat,"MatFactorSetSchurIS_C",&f);CHKERRQ(ierr);
8773  if (!f) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
8774 if (mat->schur) {
8775    ierr = MatDestroy(&mat->schur);CHKERRQ(ierr);
8776 }
8777  ierr = (*f)(mat,is);CHKERRQ(ierr);
8778  if (!mat->schur) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_PLIB,"Schur complement has not been created");
8779  ierr = MatFactorSetUpInPlaceSchur_Private(mat);CHKERRQ(ierr);
8780  PetscFunctionReturn(0);
8781}
8782
8783/*@
8784 MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
8785
8786 Logically Collective on Mat
8787
8788 Input Parameters:
8789+  F - the factored matrix obtained by calling MatGetFactor(), e.g. from the PETSc-MUMPS interface
8790.  S - location where the Schur complement is returned; can be NULL
8791-  status - the status of the Schur complement matrix; can be NULL
8792
8793 Notes:
8794 You must call MatFactorSetSchurIS() before calling this routine.
8795
8796 The routine provides a copy of the Schur matrix stored within the solver data structures.
8797 The caller must destroy the object when it is no longer needed.
8798 If MatFactorInvertSchurComplement() has been called, the routine gets back the inverse.
8799
8800 Use MatFactorGetSchurComplement() to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
8801
8802 Developer Notes:
8803    The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different from a standard PETSc
8804    matrix representation, and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
8805
8806 See MatCreateSchurComplement() or MatGetSchurComplement() for ways to create virtual or approximate Schur complements.
8807
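   Example of Usage:
   A minimal sketch; assumes MatFactorSetSchurIS() was called on F before the factorization:
.vb
   Mat S;
   MatFactorSchurStatus status;
   MatFactorCreateSchurComplement(F,&S,&status);
   /* use S as a regular Mat, then release the copy */
   MatDestroy(&S);
.ve
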
8808 Level: advanced
8809
8810
8811
8812.seealso: MatGetFactor(), MatFactorSetSchurIS(), MatFactorGetSchurComplement(), MatFactorSchurStatus
8813@*/
8814PetscErrorCode MatFactorCreateSchurComplement(Mat F,Mat* S,MatFactorSchurStatus* status)
8815{
8816 PetscErrorCode ierr;
8817
8818  PetscFunctionBegin;
8819  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
8820  if (S) PetscValidPointer(S,2);
8821  if (status) PetscValidPointer(status,3);
8822 if (S) {
8823 PetscErrorCode (*f)(Mat,Mat*);
8824
8825    ierr = PetscObjectQueryFunction((PetscObject)F,"MatFactorCreateSchurComplement_C",&f);CHKERRQ(ierr);
8826 if (f) {
8827      ierr = (*f)(F,S);CHKERRQ(ierr);
8828 } else {
8829      ierr = MatDuplicate(F->schur,MAT_COPY_VALUES,S);CHKERRQ(ierr);
8830 }
8831 }
8832 if (status) *status = F->schur_status;
8833  PetscFunctionReturn(0);
8834}
8835
8836/*@
8837 MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
8838
8839 Logically Collective on Mat
8840
8841 Input Parameters:
8842+ F - the factored matrix obtained by calling MatGetFactor()
8843.  S - location where the Schur complement is returned; can be NULL
8844-  status - the status of the Schur complement matrix; can be NULL
8845
8846 Notes:
8847 You must call MatFactorSetSchurIS() before calling this routine.
8848
8849 Schur complement mode is currently implemented for sequential matrices.
8850    The routine returns the Schur complement stored within the data structures of the solver.
8851 If MatFactorInvertSchurComplement() has previously been called, the returned matrix is actually the inverse of the Schur complement.
8852 The returned matrix should not be destroyed; the caller should call MatFactorRestoreSchurComplement() when the object is no longer needed.
8853
8854 Use MatFactorCreateSchurComplement() to create a copy of the Schur complement matrix that is within a factored matrix
8855
8856 See MatCreateSchurComplement() or MatGetSchurComplement() for ways to create virtual or approximate Schur complements.
8857
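   Example of Usage:
   A minimal sketch of the get/restore pairing:
.vb
   Mat S;
   MatFactorSchurStatus status;
   MatFactorGetSchurComplement(F,&S,&status);
   /* use S here, but do not destroy it */
   MatFactorRestoreSchurComplement(F,&S,status);
.ve
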
8858 Level: advanced
8859
8860
8861
8862.seealso: MatGetFactor(), MatFactorSetSchurIS(), MatFactorRestoreSchurComplement(), MatFactorCreateSchurComplement(), MatFactorSchurStatus
8863@*/
8864PetscErrorCode MatFactorGetSchurComplement(Mat F,Mat* S,MatFactorSchurStatus* status)
8865{
8866  PetscFunctionBegin;
8867 PetscValidHeaderSpecific(F,MAT_CLASSID,1)do { if (!F) return PetscError(((MPI_Comm)0x44000001),8867,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(F,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),8867,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(F))->classid != MAT_CLASSID) { if (
((PetscObject)(F))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),8867,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),8867,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
8868 if (S) PetscValidPointer(S,2)do { if (!S) return PetscError(((MPI_Comm)0x44000001),8868,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",2); if (!PetscCheckPointer(S,
PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),8868,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",2); } while (0)
;
8869 if (status) PetscValidPointer(status,3)do { if (!status) return PetscError(((MPI_Comm)0x44000001),8869
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",3); if
(!PetscCheckPointer(status,PETSC_CHAR)) return PetscError(((
MPI_Comm)0x44000001),8869,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",3);
} while (0)
;
8870 if (S) *S = F->schur;
8871 if (status) *status = F->schur_status;
8872 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
8873}
8874
8875/*@
8876 MatFactorRestoreSchurComplement - Restores the Schur complement matrix object obtained from a call to MatFactorGetSchurComplement()
8877
8878 Logically Collective on Mat
8879
8880 Input Parameters:
8881+ F - the factored matrix obtained by calling MatGetFactor()
8882. *S - location where the Schur complement is stored
8883- status - the status of the Schur complement matrix (see MatFactorSchurStatus)
8884
8885 Notes:
8886
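 Example of Usage (an illustrative sketch, not from this source; F is assumed to come from MatGetFactor() with a Schur IS set and the matrix already factored):
.vb
     MatFactorGetSchurComplement(F,&S,&status);
     /* ... inspect or modify S ... */
     MatFactorRestoreSchurComplement(F,&S,status);  /* S is set to NULL on return */
.ve
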
8887 Level: advanced
8888
8889 References:
8890
8891.seealso: MatGetFactor(), MatFactorSetSchurIS(), MatFactorRestoreSchurComplement(), MatFactorCreateSchurComplement(), MatFactorSchurStatus
8892@*/
8893PetscErrorCode MatFactorRestoreSchurComplement(Mat F,Mat* S,MatFactorSchurStatus status)
8894{
8895 PetscErrorCode ierr;
8896
8897  PetscFunctionBegin;
8898  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
8899  if (S) {
8900    PetscValidHeaderSpecific(*S,MAT_CLASSID,2);
8901    *S = NULL;
8902  }
8903  F->schur_status = status;
8904  ierr = MatFactorUpdateSchurStatus_Private(F);CHKERRQ(ierr);
8905  PetscFunctionReturn(0);
8906}
8907
8908/*@
8909 MatFactorSolveSchurComplementTranspose - Solves the transpose of the Schur complement system computed during the factorization step
8910
8911 Logically Collective on Mat
8912
8913 Input Parameters:
8914+ F - the factored matrix obtained by calling MatGetFactor()
8915. rhs - location where the right hand side of the Schur complement system is stored
8916- sol - location where the solution of the Schur complement system is returned
8917
8918 Notes:
8919 The sizes of the vectors should match the size of the Schur complement
8920
8921 Must be called after MatFactorSetSchurIS()
8922
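 Example of Usage (an illustrative sketch, not from this source; rhs and sol are assumed to be Vecs whose sizes match the Schur complement):
.vb
     MatFactorSolveSchurComplementTranspose(F,rhs,sol);  /* solves S^T sol = rhs */
.ve
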
8923 Level: advanced
8924
8925 References:
8926
8927.seealso: MatGetFactor(), MatFactorSetSchurIS(), MatFactorSolveSchurComplement()
8928@*/
8929PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
8930{
8931 PetscErrorCode ierr;
8932
8933  PetscFunctionBegin;
8934  PetscValidType(F,1);
8935  PetscValidType(rhs,2);
8936  PetscValidType(sol,3);
8937  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
8938  PetscValidHeaderSpecific(rhs,VEC_CLASSID,2);
8939  PetscValidHeaderSpecific(sol,VEC_CLASSID,3);
8940  PetscCheckSameComm(F,1,rhs,2);
8941  PetscCheckSameComm(F,1,sol,3);
8942  ierr = MatFactorFactorizeSchurComplement(F);CHKERRQ(ierr);
8943  switch (F->schur_status) {
8944  case MAT_FACTOR_SCHUR_FACTORED:
8945    ierr = MatSolveTranspose(F->schur,rhs,sol);CHKERRQ(ierr);
8946    break;
8947  case MAT_FACTOR_SCHUR_INVERTED:
8948    ierr = MatMultTranspose(F->schur,rhs,sol);CHKERRQ(ierr);
8949    break;
8950  default:
8951    SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Unhandled MatFactorSchurStatus %D",F->schur_status);
8952    break;
8953  }
8954  PetscFunctionReturn(0);
8955}
8956
8957/*@
8958 MatFactorSolveSchurComplement - Solves the Schur complement system computed during the factorization step
8959
8960 Logically Collective on Mat
8961
8962 Input Parameters:
8963+ F - the factored matrix obtained by calling MatGetFactor()
8964. rhs - location where the right hand side of the Schur complement system is stored
8965- sol - location where the solution of the Schur complement system is returned
8966
8967 Notes:
8968 The sizes of the vectors should match the size of the Schur complement
8969
8970 Must be called after MatFactorSetSchurIS()
8971
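 Example of Usage (an illustrative sketch, not from this source; F is assumed factored with a Schur IS set, and rhs/sol sized to the Schur complement):
.vb
     MatFactorSolveSchurComplement(F,rhs,sol);  /* solves S sol = rhs */
.ve
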
8972 Level: advanced
8973
8974 References:
8975
8976.seealso: MatGetFactor(), MatFactorSetSchurIS(), MatFactorSolveSchurComplementTranspose()
8977@*/
8978PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
8979{
8980 PetscErrorCode ierr;
8981
8982  PetscFunctionBegin;
8983  PetscValidType(F,1);
8984  PetscValidType(rhs,2);
8985  PetscValidType(sol,3);
8986  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
8987  PetscValidHeaderSpecific(rhs,VEC_CLASSID,2);
8988  PetscValidHeaderSpecific(sol,VEC_CLASSID,3);
8989  PetscCheckSameComm(F,1,rhs,2);
8990  PetscCheckSameComm(F,1,sol,3);
8991  ierr = MatFactorFactorizeSchurComplement(F);CHKERRQ(ierr);
8992  switch (F->schur_status) {
8993  case MAT_FACTOR_SCHUR_FACTORED:
8994    ierr = MatSolve(F->schur,rhs,sol);CHKERRQ(ierr);
8995    break;
8996  case MAT_FACTOR_SCHUR_INVERTED:
8997    ierr = MatMult(F->schur,rhs,sol);CHKERRQ(ierr);
8998    break;
8999  default:
9000    SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Unhandled MatFactorSchurStatus %D",F->schur_status);
9001    break;
9002  }
9003  PetscFunctionReturn(0);
9004}
9005
9006/*@
9007 MatFactorInvertSchurComplement - Inverts the Schur complement matrix computed during the factorization step
9008
9009 Logically Collective on Mat
9010
9011 Input Parameters:
9012. F - the factored matrix obtained by calling MatGetFactor()
9013
9014 Notes:
9015 Must be called after MatFactorSetSchurIS().
9016
9017 Call MatFactorGetSchurComplement() or MatFactorCreateSchurComplement() AFTER this call to actually compute the inverse and get access to it.
9018
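 Example of Usage (an illustrative sketch, not from this source; the factorization and Schur IS are assumed set up as for MatFactorGetSchurComplement()):
.vb
     MatFactorInvertSchurComplement(F);
     MatFactorGetSchurComplement(F,&S,&status);  /* S now holds the explicit inverse */
     /* ... */
     MatFactorRestoreSchurComplement(F,&S,status);
.ve
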
9019 Level: advanced
9020
9021 References:
9022
9023.seealso: MatGetFactor(), MatFactorSetSchurIS(), MatFactorGetSchurComplement(), MatFactorCreateSchurComplement()
9024@*/
9025PetscErrorCode MatFactorInvertSchurComplement(Mat F)
9026{
9027 PetscErrorCode ierr;
9028
9029  PetscFunctionBegin;
9030  PetscValidType(F,1);
9031  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
9032  if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(0);
9033  ierr = MatFactorFactorizeSchurComplement(F);CHKERRQ(ierr);
9034  ierr = MatFactorInvertSchurComplement_Private(F);CHKERRQ(ierr);
9035  F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
9036  PetscFunctionReturn(0);
9037}
9038
9039/*@
9040 MatFactorFactorizeSchurComplement - Factorizes the Schur complement matrix computed during the factorization step
9041
9042 Logically Collective on Mat
9043
9044 Input Parameters:
9045. F - the factored matrix obtained by calling MatGetFactor()
9046
9047 Notes:
9048 Must be called after MatFactorSetSchurIS().
9049
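 Example of Usage (an illustrative sketch, not from this source; after this call the Schur status is MAT_FACTOR_SCHUR_FACTORED, so subsequent solves reuse the factors):
.vb
     MatFactorFactorizeSchurComplement(F);
     MatFactorSolveSchurComplement(F,rhs,sol);
.ve
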
9050 Level: advanced
9051
9052 References:
9053
9054.seealso: MatGetFactor(), MatFactorSetSchurIS(), MatFactorInvertSchurComplement()
9055@*/
9056PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
9057{
9058 PetscErrorCode ierr;
9059
9060  PetscFunctionBegin;
9061  PetscValidType(F,1);
9062  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
9063  if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(0);
9064  ierr = MatFactorFactorizeSchurComplement_Private(F);CHKERRQ(ierr);
9065  F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
9066  PetscFunctionReturn(0);
9067}
9068
9069PetscErrorCode MatPtAP_Basic(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
9070{
9071 Mat AP;
9072 PetscErrorCode ierr;
9073
9074  PetscFunctionBegin;
9075  ierr = PetscInfo2(A,"Mat types %s and %s using basic PtAP\n",((PetscObject)A)->type_name,((PetscObject)P)->type_name);CHKERRQ(ierr);
9076  ierr = MatMatMult(A,P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&AP);CHKERRQ(ierr);
9077  ierr = MatTransposeMatMult(P,AP,scall,fill,C);CHKERRQ(ierr);
9078  ierr = MatDestroy(&AP);CHKERRQ(ierr);
9079  PetscFunctionReturn(0);
9080}
9081
9082/*@
9083 MatPtAP - Creates the matrix product C = P^T * A * P
9084
9085 Neighbor-wise Collective on Mat
9086
9087 Input Parameters:
9088+ A - the matrix
9089. P - the projection matrix
9090. scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9091- fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use PETSC_DEFAULT if you do not have a good estimate
9092 if the result is a dense matrix this is irrelevant
9093
9094 Output Parameters:
9095. C - the product matrix
9096
9097 Notes:
9098 C will be created and must be destroyed by the user with MatDestroy().
9099
9100 For matrix types without a special implementation the function falls back to MatMatMult() followed by MatTransposeMatMult().
9101
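 Example of Usage (an illustrative sketch, not from this source, of a Galerkin coarse-grid product; A and P are assumed assembled):
.vb
     MatPtAP(A,P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);  /* creates C = P^T*A*P */
     /* ... change the values (not the nonzero pattern) of A ... */
     MatPtAP(A,P,MAT_REUSE_MATRIX,PETSC_DEFAULT,&C);    /* recomputes C in place */
     MatDestroy(&C);
.ve
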
9102 Level: intermediate
9103
9104.seealso: MatPtAPSymbolic(), MatPtAPNumeric(), MatMatMult(), MatRARt()
9105@*/
9106PetscErrorCode MatPtAP(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
9107{
9108 PetscErrorCode ierr;
9109 PetscErrorCode (*fA)(Mat,Mat,MatReuse,PetscReal,Mat*);
9110 PetscErrorCode (*fP)(Mat,Mat,MatReuse,PetscReal,Mat*);
9111  PetscErrorCode (*ptap)(Mat,Mat,MatReuse,PetscReal,Mat*)=NULL;
9112 PetscBool sametype;
9113
9114  PetscFunctionBegin;
9115  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9116  PetscValidType(A,1);
9117  MatCheckPreallocated(A,1);
9118  if (scall == MAT_INPLACE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported");
9119  if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9120  if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9121  PetscValidHeaderSpecific(P,MAT_CLASSID,2);
9122  PetscValidType(P,2);
9123  MatCheckPreallocated(P,2);
9124  if (!P->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9125  if (P->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9126
9127  if (A->rmap->N != A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix A must be square, %D != %D",A->rmap->N,A->cmap->N);
9128  if (P->rmap->N != A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",P->rmap->N,A->cmap->N);
9129  if (fill == PETSC_DEFAULT || fill == PETSC_DECIDE) fill = 2.0;
9130  if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill);
9131
9132  if (scall == MAT_REUSE_MATRIX) {
9133    PetscValidPointer(*C,5);
9134    PetscValidHeaderSpecific(*C,MAT_CLASSID,5);
9135
9136    ierr = PetscLogEventBegin(MAT_PtAP,A,P,0,0);CHKERRQ(ierr);
9137    ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
9138    if ((*C)->ops->ptapnumeric) {
9139      ierr = (*(*C)->ops->ptapnumeric)(A,P,*C);CHKERRQ(ierr);
9140    } else {
9141      ierr = MatPtAP_Basic(A,P,scall,fill,C);
Value stored to 'ierr' is never read
9142    }
9143    ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
9144    ierr = PetscLogEventEnd(MAT_PtAP,A,P,0,0);CHKERRQ(ierr);
9145    PetscFunctionReturn(0);
9146  }
9147
9148  if (fill == PETSC_DEFAULT || fill == PETSC_DECIDE) fill = 2.0;
9149  if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill);
9150
9151  fA = A->ops->ptap;
9152  fP = P->ops->ptap;
9153  ierr = PetscStrcmp(((PetscObject)A)->type_name,((PetscObject)P)->type_name,&sametype);CHKERRQ(ierr);
9154  if (fP == fA && sametype) {
9155    ptap = fA;
9156  } else {
9157    /* dispatch based on the type of A and P from their PetscObject's PetscFunctionLists. */
9158    char ptapname[256];
9159    ierr = PetscStrncpy(ptapname,"MatPtAP_",sizeof(ptapname));CHKERRQ(ierr);
9160    ierr = PetscStrlcat(ptapname,((PetscObject)A)->type_name,sizeof(ptapname));CHKERRQ(ierr);
9161    ierr = PetscStrlcat(ptapname,"_",sizeof(ptapname));CHKERRQ(ierr);
9162    ierr = PetscStrlcat(ptapname,((PetscObject)P)->type_name,sizeof(ptapname));CHKERRQ(ierr);
9163    ierr = PetscStrlcat(ptapname,"_C",sizeof(ptapname));CHKERRQ(ierr); /* e.g., ptapname = "MatPtAP_seqdense_seqaij_C" */
9164    ierr = PetscObjectQueryFunction((PetscObject)P,ptapname,&ptap);CHKERRQ(ierr);
9165  }
9166
9167  if (!ptap) ptap = MatPtAP_Basic;
9168  ierr = PetscLogEventBegin(MAT_PtAP,A,P,0,0);CHKERRQ(ierr);
9169  ierr = (*ptap)(A,P,scall,fill,C);CHKERRQ(ierr);
9170  ierr = PetscLogEventEnd(MAT_PtAP,A,P,0,0);CHKERRQ(ierr);
9171  if (A->symmetric_set && A->symmetric) {
9172    ierr = MatSetOption(*C,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
9173  }
9174  PetscFunctionReturn(0);
9175}
9176
9177/*@
9178 MatPtAPNumeric - Computes the matrix product C = P^T * A * P
9179
9180 Neighbor-wise Collective on Mat
9181
9182 Input Parameters:
9183+ A - the matrix
9184- P - the projection matrix
9185
9186 Output Parameters:
9187. C - the product matrix
9188
9189 Notes:
9190 C must have been created by calling MatPtAPSymbolic() and must be destroyed by
9191 the user using MatDestroy().
9192
9193 This routine is currently only implemented for pairs of AIJ matrices and classes
9194 which inherit from AIJ. C will be of type MATAIJ.
9195
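 Example of Usage (an illustrative sketch, not from this source; the symbolic phase creates C once, the numeric phase can then be repeated as the values of A change):
.vb
     MatPtAPSymbolic(A,P,fill,&C);  /* creates the nonzero structure of C */
     MatPtAPNumeric(A,P,C);         /* fills in the values of C = P^T*A*P */
     MatDestroy(&C);
.ve
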
9196 Level: intermediate
9197
9198.seealso: MatPtAP(), MatPtAPSymbolic(), MatMatMultNumeric()
9199@*/
9200PetscErrorCode MatPtAPNumeric(Mat A,Mat P,Mat C)
9201{
9202 PetscErrorCode ierr;
9203
9204  PetscFunctionBegin;
9205  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9206  PetscValidType(A,1);
9207  if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9208  if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9209  PetscValidHeaderSpecific(P,MAT_CLASSID,2);
9210  PetscValidType(P,2);
9211  MatCheckPreallocated(P,2);
9212  if (!P->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9213  if (P->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9214  PetscValidHeaderSpecific(C,MAT_CLASSID,3);
9215  PetscValidType(C,3);
9216  MatCheckPreallocated(C,3);
9217  if (C->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9218  if (P->cmap->N!=C->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",P->cmap->N,C->rmap->N);
9219  if (P->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",P->rmap->N,A->cmap->N);
9220  if (A->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix 'A' must be square, %D != %D",A->rmap->N,A->cmap->N);
9221  if (P->cmap->N!=C->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",P->cmap->N,C->cmap->N);
9222  MatCheckPreallocated(A,1);
9223
9224  if (!C->ops->ptapnumeric) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"MatPtAPNumeric implementation is missing. You should call MatPtAPSymbolic first");
9225  ierr = PetscLogEventBegin(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
9226  ierr = (*C->ops->ptapnumeric)(A,P,C);CHKERRQ(ierr);
9227  ierr = PetscLogEventEnd(MAT_PtAPNumeric,A,P,0,0);CHKERRQ(ierr);
9228  PetscFunctionReturn(0);
9229}
9230
9231/*@
9232 MatPtAPSymbolic - Creates the (i,j) structure of the matrix product C = P^T * A * P
9233
9234 Neighbor-wise Collective on Mat
9235
9236 Input Parameters:
9237+ A - the matrix
9238- P - the projection matrix
9239
9240 Output Parameters:
9241. C - the (i,j) structure of the product matrix
9242
9243 Notes:
9244 C will be created and must be destroyed by the user with MatDestroy().
9245
9246 This routine is currently only implemented for pairs of SeqAIJ matrices and classes
9247 which inherit from SeqAIJ. C will be of type MATSEQAIJ. The product is computed using
9248 this (i,j) structure by calling MatPtAPNumeric().
9249
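 Example of Usage (an illustrative sketch, not from the source; it assumes A and P are
 assembled SeqAIJ matrices of compatible sizes and that a fill estimate of 2.0 is reasonable):
.vb
 Mat C;
 MatPtAPSymbolic(A,P,2.0,&C);  /* create the (i,j) structure of C = P^T*A*P */
 MatPtAPNumeric(A,P,C);        /* fill in the numerical values */
 ...                           /* new values in A, same nonzero pattern */
 MatPtAPNumeric(A,P,C);        /* reuse the structure for the new values */
 MatDestroy(&C);
.ve
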
9250 Level: intermediate
9251
9252.seealso: MatPtAP(), MatPtAPNumeric(), MatMatMultSymbolic()
9253@*/
9254PetscErrorCode MatPtAPSymbolic(Mat A,Mat P,PetscReal fill,Mat *C)
9255{
9256 PetscErrorCode ierr;
9257
9258 PetscFunctionBegin;
9259 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9260 PetscValidType(A,1);
9261 if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9262 if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9263 if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill);
9264 PetscValidHeaderSpecific(P,MAT_CLASSID,2);
9265 PetscValidType(P,2);
9266 MatCheckPreallocated(P,2);
9267 if (!P->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9268 if (P->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9269 PetscValidPointer(C,3);
9270
9271 if (P->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",P->rmap->N,A->cmap->N);
9272 if (A->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix 'A' must be square, %D != %D",A->rmap->N,A->cmap->N);
9273 MatCheckPreallocated(A,1);
9274
9275 if (!A->ops->ptapsymbolic) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MatType %s",((PetscObject)A)->type_name);
9276 ierr = PetscLogEventBegin(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
9277 ierr = (*A->ops->ptapsymbolic)(A,P,fill,C);CHKERRQ(ierr);
9278 ierr = PetscLogEventEnd(MAT_PtAPSymbolic,A,P,0,0);CHKERRQ(ierr);
9279
9280 /* ierr = MatSetBlockSize(*C,A->rmap->bs);CHKERRQ(ierr); NO! this is not always true -ma */
9281 PetscFunctionReturn(0);
9282}
9283
9284/*@
9285 MatRARt - Creates the matrix product C = R * A * R^T
9286
9287 Neighbor-wise Collective on Mat
9288
9289 Input Parameters:
9290+ A - the matrix
9291. R - the projection matrix
9292. scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9293- fill - expected fill as ratio of nnz(C)/nnz(A), use PETSC_DEFAULT if you do not have a good estimate
9294 if the result is a dense matrix this is irrelevant
9295
9296 Output Parameters:
9297. C - the product matrix
9298
9299 Notes:
9300 C will be created and must be destroyed by the user with MatDestroy().
9301
9302 This routine is currently only implemented for pairs of AIJ matrices and classes
9303 which inherit from AIJ. Due to PETSc sparse matrix block row distribution among processes,
9304 parallel MatRARt is implemented via explicit transpose of R, which could be very expensive.
9305 We recommend using MatPtAP().
9306
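 Example of Usage (an illustrative sketch, not from the source; it assumes A and R are
 assembled AIJ matrices with compatible dimensions):
.vb
 Mat C;
 MatRARt(A,R,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);  /* first product creates C */
 ...                                                /* new values in A, same nonzero pattern */
 MatRARt(A,R,MAT_REUSE_MATRIX,PETSC_DEFAULT,&C);    /* reuse C for the recomputation */
 MatDestroy(&C);
.ve
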
9307 Level: intermediate
9308
9309.seealso: MatRARtSymbolic(), MatRARtNumeric(), MatMatMult(), MatPtAP()
9310@*/
9311PetscErrorCode MatRARt(Mat A,Mat R,MatReuse scall,PetscReal fill,Mat *C)
9312{
9313 PetscErrorCode ierr;
9314
9315 PetscFunctionBegin;
9316 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9317 PetscValidType(A,1);
9318 if (scall == MAT_INPLACE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported");
9319 if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9320 if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9321 PetscValidHeaderSpecific(R,MAT_CLASSID,2);
9322 PetscValidType(R,2);
9323 MatCheckPreallocated(R,2);
9324 if (!R->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9325 if (R->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9326 PetscValidPointer(C,3);
9327 if (R->cmap->N!=A->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)R),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",R->cmap->N,A->rmap->N);
9328
9329 if (fill == PETSC_DEFAULT || fill == PETSC_DECIDE) fill = 2.0;
9330 if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill);
9331 MatCheckPreallocated(A,1);
9332
9333 if (!A->ops->rart) {
9334 Mat Rt;
9335 ierr = MatTranspose(R,MAT_INITIAL_MATRIX,&Rt);CHKERRQ(ierr);
9336 ierr = MatMatMatMult(R,A,Rt,scall,fill,C);CHKERRQ(ierr);
9337 ierr = MatDestroy(&Rt);CHKERRQ(ierr);
9338 PetscFunctionReturn(0);
9339 }
9340 ierr = PetscLogEventBegin(MAT_RARt,A,R,0,0);CHKERRQ(ierr);
9341 ierr = (*A->ops->rart)(A,R,scall,fill,C);CHKERRQ(ierr);
9342 ierr = PetscLogEventEnd(MAT_RARt,A,R,0,0);CHKERRQ(ierr);
9343 PetscFunctionReturn(0);
9344}
9345
9346/*@
9347 MatRARtNumeric - Computes the matrix product C = R * A * R^T
9348
9349 Neighbor-wise Collective on Mat
9350
9351 Input Parameters:
9352+ A - the matrix
9353- R - the projection matrix
9354
9355 Output Parameters:
9356. C - the product matrix
9357
9358 Notes:
9359 C must have been created by calling MatRARtSymbolic() and must be destroyed by
9360 the user using MatDestroy().
9361
9362 This routine is currently only implemented for pairs of AIJ matrices and classes
9363 which inherit from AIJ. C will be of type MATAIJ.
9364
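 Example of Usage (an illustrative sketch, not from the source; it assumes C was obtained
 from MatRARtSymbolic() on the same A and R):
.vb
 MatRARtNumeric(A,R,C);   /* compute the values of C = R*A*R^T */
 ...                      /* change numerical values in A, same nonzero pattern */
 MatRARtNumeric(A,R,C);   /* recompute using the existing structure of C */
.ve
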
9365 Level: intermediate
9366
9367.seealso: MatRARt(), MatRARtSymbolic(), MatMatMultNumeric()
9368@*/
9369PetscErrorCode MatRARtNumeric(Mat A,Mat R,Mat C)
9370{
9371 PetscErrorCode ierr;
9372
9373 PetscFunctionBegin;
9374 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9375 PetscValidType(A,1);
9376 if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9377 if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9378 PetscValidHeaderSpecific(R,MAT_CLASSID,2);
9379 PetscValidType(R,2);
9380 MatCheckPreallocated(R,2);
9381 if (!R->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9382 if (R->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9383 PetscValidHeaderSpecific(C,MAT_CLASSID,3);
9384 PetscValidType(C,3);
9385 MatCheckPreallocated(C,3);
9386 if (C->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9387 if (R->rmap->N!=C->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",R->rmap->N,C->rmap->N);
9388 if (R->cmap->N!=A->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",R->cmap->N,A->rmap->N);
9389 if (A->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix 'A' must be square, %D != %D",A->rmap->N,A->cmap->N);
9390 if (R->rmap->N!=C->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",R->rmap->N,C->cmap->N);
9391 MatCheckPreallocated(A,1);
9392
9393 ierr = PetscLogEventBegin(MAT_RARtNumeric,A,R,0,0);CHKERRQ(ierr);
9394 ierr = (*A->ops->rartnumeric)(A,R,C);CHKERRQ(ierr);
9395 ierr = PetscLogEventEnd(MAT_RARtNumeric,A,R,0,0);CHKERRQ(ierr);
9396 PetscFunctionReturn(0);
9397}
9398
9399/*@
9400 MatRARtSymbolic - Creates the (i,j) structure of the matrix product C = R * A * R^T
9401
9402 Neighbor-wise Collective on Mat
9403
9404 Input Parameters:
9405+ A - the matrix
9406- R - the projection matrix
9407
9408 Output Parameters:
9409. C - the (i,j) structure of the product matrix
9410
9411 Notes:
9412 C will be created and must be destroyed by the user with MatDestroy().
9413
9414 This routine is currently only implemented for pairs of SeqAIJ matrices and classes
9415 which inherit from SeqAIJ. C will be of type MATSEQAIJ. The product is computed using
9416 this (i,j) structure by calling MatRARtNumeric().
9417
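 Example of Usage (an illustrative sketch, not from the source; it assumes A and R are
 assembled SeqAIJ matrices and that a fill estimate of 2.0 is reasonable):
.vb
 Mat C;
 MatRARtSymbolic(A,R,2.0,&C);  /* allocate the (i,j) structure of C = R*A*R^T */
 MatRARtNumeric(A,R,C);        /* fill in the numerical values */
 MatDestroy(&C);
.ve
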
9418 Level: intermediate
9419
9420.seealso: MatRARt(), MatRARtNumeric(), MatMatMultSymbolic()
9421@*/
9422PetscErrorCode MatRARtSymbolic(Mat A,Mat R,PetscReal fill,Mat *C)
9423{
9424 PetscErrorCode ierr;
9425
9426 PetscFunctionBegin;
9427 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9428 PetscValidType(A,1);
9429 if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9430 if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9431 if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill);
9432 PetscValidHeaderSpecific(R,MAT_CLASSID,2);
9433 PetscValidType(R,2);
9434 MatCheckPreallocated(R,2);
9435 if (!R->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9436 if (R->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9437 PetscValidPointer(C,3);
9438
9439 if (R->cmap->N!=A->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",R->cmap->N,A->rmap->N);
9440 if (A->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix 'A' must be square, %D != %D",A->rmap->N,A->cmap->N);
9441 MatCheckPreallocated(A,1);
9442 ierr = PetscLogEventBegin(MAT_RARtSymbolic,A,R,0,0);CHKERRQ(ierr);
9443 ierr = (*A->ops->rartsymbolic)(A,R,fill,C);CHKERRQ(ierr);
9444 ierr = PetscLogEventEnd(MAT_RARtSymbolic,A,R,0,0);CHKERRQ(ierr);
9445
9446 ierr = MatSetBlockSizes(*C,PetscAbs(R->rmap->bs),PetscAbs(R->rmap->bs));CHKERRQ(ierr);
9447 PetscFunctionReturn(0);
9448}
9449
9450/*@
9451 MatMatMult - Performs Matrix-Matrix Multiplication C=A*B.
9452
9453 Neighbor-wise Collective on Mat
9454
9455 Input Parameters:
9456+ A - the left matrix
9457. B - the right matrix
9458. scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9459- fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use PETSC_DEFAULT if you do not have a good estimate
9460 if the result is a dense matrix this is irrelevant
9461
9462 Output Parameters:
9463. C - the product matrix
9464
9465 Notes:
9466 Unless scall is MAT_REUSE_MATRIX, C will be created.
9467
9468 MAT_REUSE_MATRIX can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
9469 call to this function with either MAT_INITIAL_MATRIX or MatMatMultSymbolic().
9470
9471 To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
9472 actually needed.
9473
9474 If you have many matrices with the same non-zero structure to multiply, you
9475 should either
9476$ 1) use MAT_REUSE_MATRIX in all calls but the first or
9477$ 2) call MatMatMultSymbolic() once and then MatMatMultNumeric() for each product needed
9478 In the special case where matrix B (and hence C) is dense you can create the correctly sized matrix C yourself and then call this routine
9479 with MAT_REUSE_MATRIX, rather than first having MatMatMult() create it for you. You can NEVER do this if the matrix C is sparse.
9480
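 Example of Usage (an illustrative sketch, not from the source; it assumes A and B are
 assembled matrices of a type for which MatMatMult() is implemented, with compatible sizes):
.vb
 Mat C;
 MatMatMult(A,B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);  /* first product creates C */
 ...                                                   /* new values in A and B, same patterns */
 MatMatMult(A,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&C);    /* reuse C, skipping the symbolic stage */
 MatDestroy(&C);
.ve
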
9481 Level: intermediate
9482
9483.seealso: MatMatMultSymbolic(), MatMatMultNumeric(), MatTransposeMatMult(), MatMatTransposeMult(), MatPtAP()
9484@*/
9485PetscErrorCode MatMatMult(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
9486{
9487 PetscErrorCode ierr;
9488 PetscErrorCode (*fA)(Mat,Mat,MatReuse,PetscReal,Mat*);
9489 PetscErrorCode (*fB)(Mat,Mat,MatReuse,PetscReal,Mat*);
9490 PetscErrorCode (*mult)(Mat,Mat,MatReuse,PetscReal,Mat*)=NULL;
9491 Mat T;
9492 PetscBool istrans;
9493
9494 PetscFunctionBegin;
9495 PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9496 PetscValidType(A,1);
9497 MatCheckPreallocated(A,1);
9498 if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9499 if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9500 PetscValidHeaderSpecific(B,MAT_CLASSID,2);
9501 PetscValidType(B,2);
9502 MatCheckPreallocated(B,2);
9503 if (!B->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9504 if (B->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9505 PetscValidPointer(C,3);
9506 if (scall == MAT_INPLACE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported");
9507 if (B->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",B->rmap->N,A->cmap->N);
9508 ierr = PetscObjectTypeCompare((PetscObject)A,MATTRANSPOSEMAT,&istrans);CHKERRQ(ierr);
9509 if (istrans) {
9510 ierr = MatTransposeGetMat(A,&T);CHKERRQ(ierr);
9511 ierr = MatTransposeMatMult(T,B,scall,fill,C);CHKERRQ(ierr);
9512 PetscFunctionReturn(0);
9513 } else {
9514 ierr = PetscObjectTypeCompare((PetscObject)B,MATTRANSPOSEMAT"transpose",&istrans);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9514,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9515 if (istrans) {
9516 ierr = MatTransposeGetMat(B,&T);CHKERRQ(ierr);
9517 ierr = MatMatTransposeMult(A,T,scall,fill,C);CHKERRQ(ierr);
9518 PetscFunctionReturn(0);
9519 }
9520 }
9521 if (scall == MAT_REUSE_MATRIX) {
9522 PetscValidPointer(*C,5);
9523 PetscValidHeaderSpecific(*C,MAT_CLASSID,5);
9524 ierr = PetscLogEventBegin(MAT_MatMult,A,B,0,0);CHKERRQ(ierr);
9525 ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
9526 ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
9527 ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
9528 ierr = PetscLogEventEnd(MAT_MatMult,A,B,0,0);CHKERRQ(ierr);
9529 PetscFunctionReturn(0);
9530 }
9531 if (fill == PETSC_DEFAULT-2 || fill == PETSC_DECIDE-1) fill = 2.0;
9532 if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill)return PetscError(PetscObjectComm((PetscObject)A),9532,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Expected fill=%g must be >= 1.0",(double)fill)
;
9533
9534 fA = A->ops->matmult;
9535 fB = B->ops->matmult;
9536 if (fB == fA) {
9537 if (!fB) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MatMatMult not supported for B of type %s",((PetscObject)B)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9537,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"MatMatMult not supported for B of type %s",((PetscObject)B)
->type_name)
;
9538 mult = fB;
9539 } else {
9540 /* dispatch based on the type of A and B from their PetscObject's PetscFunctionLists. */
9541 char multname[256];
9542 ierr = PetscStrncpy(multname,"MatMatMult_",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9542,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9543 ierr = PetscStrlcat(multname,((PetscObject)A)->type_name,sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9543,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9544 ierr = PetscStrlcat(multname,"_",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9544,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9545 ierr = PetscStrlcat(multname,((PetscObject)B)->type_name,sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9545,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9546 ierr = PetscStrlcat(multname,"_C",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9546,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
; /* e.g., multname = "MatMatMult_seqdense_seqaij_C" */
9547 ierr = PetscObjectQueryFunction((PetscObject)B,multname,&mult)PetscObjectQueryFunction_Private(((PetscObject)B),(multname),
(PetscVoidFunction*)(&mult))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9547,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9548 if (!mult) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MatMatMult requires A, %s, to be compatible with B, %s",((PetscObject)A)->type_name,((PetscObject)B)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9548,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",75,PETSC_ERROR_INITIAL
,"MatMatMult requires A, %s, to be compatible with B, %s",((PetscObject
)A)->type_name,((PetscObject)B)->type_name)
;
9549 }
9550 ierr = PetscLogEventBegin(MAT_MatMult,A,B,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatMult].active) ? (*PetscLogPLB)((MAT_MatMult),0,(PetscObject
)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject)(0)) : 0 )
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9550,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9551 ierr = (*mult)(A,B,scall,fill,C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9551,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9552 ierr = PetscLogEventEnd(MAT_MatMult,A,B,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatMult].active) ? (*PetscLogPLE)((MAT_MatMult),0,(PetscObject
)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject)(0)) : 0 )
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9552,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9553 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9554}
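/*
   Editor's sketch (not part of the original source): a minimal use of MatMatMult()
   above, assuming A and B are assembled matrices of compatible types and dimensions.
   PETSC_DEFAULT lets PETSc pick the fill estimate; when the types of A and B differ,
   the dispatch above resolves a type-pair routine such as
   "MatMatMult_seqdense_seqaij_C" from B's function list.

      Mat C;
      MatMatMult(A,B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);
      .... change the values, but not the nonzero structure, of A or B ....
      MatMatMult(A,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&C);
      MatDestroy(&C);
*/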
9555
9556/*@
9557 MatMatMultSymbolic - Performs construction and preallocation of the matrix-matrix product C=A*B and computes its ij structure.
9558 Call this routine before calling MatMatMultNumeric().
9559
9560 Neighbor-wise Collective on Mat
9561
9562 Input Parameters:
9563+ A - the left matrix
9564. B - the right matrix
9565- fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)); use PETSC_DEFAULT if you do not have a good estimate.
9566 If C is a dense matrix this is irrelevant.
9567
9568 Output Parameters:
9569. C - the product matrix
9570
9571 Notes:
9572 To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
9573 actually needed.
9574
9575 This routine is currently implemented for
9576 - pairs of AIJ matrices and classes which inherit from AIJ; C will be of type AIJ.
9577 - pairs of AIJ (A) and Dense (B) matrices; C will be of type Dense.
9578 - pairs of Dense (A) and AIJ (B) matrices; C will be of type Dense.
9579
9580 Level: intermediate
9581
9582 Developers Note: There are ways to estimate the number of nonzeros in the resulting product; see, for example, https://arxiv.org/abs/1006.4173
9583 We should incorporate them into PETSc.
9584
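   Example of Usage (an editorial sketch, not part of the original source; assumes A
   and B are assembled AIJ matrices with compatible dimensions, and the fill guess of
   2.0 is purely illustrative):
.vb
      Mat C;
      MatMatMultSymbolic(A,B,2.0,&C);   /* allocate C and compute the ij structure of A*B */
      MatMatMultNumeric(A,B,C);         /* fill in the numerical values of C */
      MatDestroy(&C);
.ve
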
9585.seealso: MatMatMult(), MatMatMultNumeric()
9586@*/
9587PetscErrorCode MatMatMultSymbolic(Mat A,Mat B,PetscReal fill,Mat *C)
9588{
9589 PetscErrorCode ierr;
9590 PetscErrorCode (*Asymbolic)(Mat,Mat,PetscReal,Mat*);
9591 PetscErrorCode (*Bsymbolic)(Mat,Mat,PetscReal,Mat*);
9592 PetscErrorCode (*symbolic)(Mat,Mat,PetscReal,Mat*)=NULL((void*)0);
9593
9594 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9594; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9595 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),9595,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9595,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9595,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),9595,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
9596 PetscValidType(A,1)do { if (!((PetscObject)A)->type_name) return PetscError((
(MPI_Comm)0x44000001),9596,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)A)->class_name,1); } while (0)
;
9597 if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),9597,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9598 if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)A),9598,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9599
9600 PetscValidHeaderSpecific(B,MAT_CLASSID,2)do { if (!B) return PetscError(((MPI_Comm)0x44000001),9600,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(B,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9600,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(B))->classid != MAT_CLASSID) { if (
((PetscObject)(B))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9600,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),9600,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
9601 PetscValidType(B,2)do { if (!((PetscObject)B)->type_name) return PetscError((
(MPI_Comm)0x44000001),9601,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)B)->class_name,2); } while (0)
;
9602 MatCheckPreallocated(B,2)do { if (__builtin_expect(!!(!(B)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9602,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(2),"B",__func__); } while (0)
;
9603 if (!B->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),9603,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9604 if (B->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)A),9604,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9605 PetscValidPointer(C,3)do { if (!C) return PetscError(((MPI_Comm)0x44000001),9605,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",3); if (!PetscCheckPointer(C,
PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),9605,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",3); } while (0)
;
9606
9607 if (B->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",B->rmap->N,A->cmap->N)return PetscError(PetscObjectComm((PetscObject)A),9607,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Matrix dimensions are incompatible, %D != %D",B->rmap->
N,A->cmap->N)
;
9608 if (fill == PETSC_DEFAULT-2) fill = 2.0;
9609 if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill)return PetscError(PetscObjectComm((PetscObject)A),9609,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Expected fill=%g must be >= 1.0",(double)fill)
;
9610 MatCheckPreallocated(A,1)do { if (__builtin_expect(!!(!(A)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9610,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"A",__func__); } while (0)
;
9611
9612 Asymbolic = A->ops->matmultsymbolic;
9613 Bsymbolic = B->ops->matmultsymbolic;
9614 if (Asymbolic == Bsymbolic) {
9615 if (!Bsymbolic) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"C=A*B not implemented for B of type %s",((PetscObject)B)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9615,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"C=A*B not implemented for B of type %s",((PetscObject)B)->
type_name)
;
9616 symbolic = Bsymbolic;
9617 } else { /* dispatch based on the type of A and B */
9618 char symbolicname[256];
9619 ierr = PetscStrncpy(symbolicname,"MatMatMultSymbolic_",sizeof(symbolicname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9619,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9620 ierr = PetscStrlcat(symbolicname,((PetscObject)A)->type_name,sizeof(symbolicname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9620,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9621 ierr = PetscStrlcat(symbolicname,"_",sizeof(symbolicname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9621,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9622 ierr = PetscStrlcat(symbolicname,((PetscObject)B)->type_name,sizeof(symbolicname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9622,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9623 ierr = PetscStrlcat(symbolicname,"_C",sizeof(symbolicname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9623,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9624 ierr = PetscObjectQueryFunction((PetscObject)B,symbolicname,&symbolic)PetscObjectQueryFunction_Private(((PetscObject)B),(symbolicname
),(PetscVoidFunction*)(&symbolic))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9624,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9625 if (!symbolic) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MatMatMultSymbolic requires A, %s, to be compatible with B, %s",((PetscObject)A)->type_name,((PetscObject)B)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9625,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",75,PETSC_ERROR_INITIAL
,"MatMatMultSymbolic requires A, %s, to be compatible with B, %s"
,((PetscObject)A)->type_name,((PetscObject)B)->type_name
)
;
9626 }
9627 ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatMultSymbolic].active) ? (*PetscLogPLB)((MAT_MatMultSymbolic
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9627,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9628 ierr = (*symbolic)(A,B,fill,C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9628,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9629 ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatMultSymbolic].active) ? (*PetscLogPLE)((MAT_MatMultSymbolic
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9629,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9630 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9631}
9632
9633/*@
9634 MatMatMultNumeric - Performs the numeric matrix-matrix product.
9635 Call this routine after first calling MatMatMultSymbolic().
9636
9637 Neighbor-wise Collective on Mat
9638
9639 Input Parameters:
9640+ A - the left matrix
9641- B - the right matrix
9642
9643 Output Parameters:
9644. C - the product matrix, which was created by a call to MatMatMultSymbolic() or MatMatMult().
9645
9646 Notes:
9647 C must have been created with MatMatMultSymbolic().
9648
9649 This routine is currently implemented for
9650 - pairs of AIJ matrices and classes which inherit from AIJ; C will be of type MATAIJ.
9651 - pairs of AIJ (A) and Dense (B) matrices; C will be of type Dense.
9652 - pairs of Dense (A) and AIJ (B) matrices; C will be of type Dense.
9653
9654 Level: intermediate
9655
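   Example of Usage (an editorial sketch, not part of the original source; assumes the
   values, but not the nonzero structure, of A or B change between products):
.vb
      Mat C;
      MatMatMultSymbolic(A,B,2.0,&C);
      MatMatMultNumeric(A,B,C);         /* first product */
      /* ... modify the values of A and/or B ... */
      MatMatMultNumeric(A,B,C);         /* recompute C = A*B in place */
      MatDestroy(&C);
.ve
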
9656.seealso: MatMatMult(), MatMatMultSymbolic()
9657@*/
9658PetscErrorCode MatMatMultNumeric(Mat A,Mat B,Mat C)
9659{
9660 PetscErrorCode ierr;
9661
9662 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9662; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9663 ierr = MatMatMult(A,B,MAT_REUSE_MATRIX,0.0,&C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9663,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9664 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9665}
9666
9667/*@
9668 MatMatTransposeMult - Performs Matrix-Matrix Multiplication C=A*B^T.
9669
9670 Neighbor-wise Collective on Mat
9671
9672 Input Parameters:
9673+ A - the left matrix
9674. B - the right matrix
9675. scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9676- fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use PETSC_DEFAULT if not known
9677
9678 Output Parameters:
9679. C - the product matrix
9680
9681 Notes:
9682 C will be created if scall is MAT_INITIAL_MATRIX and must be destroyed by the user with MatDestroy().
9683
9684 MAT_REUSE_MATRIX can only be used if the matrices A and B have the same nonzero pattern as in the previous call.
9685
9686 To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
9687 actually needed.
9688
9689 This routine is currently only implemented for pairs of SeqAIJ matrices, for the SeqDense class,
9690 and for pairs of MPIDense matrices.
9691
9692 Options Database Keys:
9693. -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for MPIDense matrices: the
9694 first redundantly copies the transposed B matrix on each process and requires O(log P) communication complexity;
9695 the second never stores more than one portion of the B matrix at a time but requires O(P) communication complexity.
9696
9697 Level: intermediate
9698
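   Example of Usage (an editorial sketch, not part of the original source; assumes A
   and B are assembled SeqAIJ matrices with the same number of columns):
.vb
      Mat C;
      MatMatTransposeMult(A,B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);   /* C = A*B^T */
      /* ... change the values, but not the nonzero structure, of A or B ... */
      MatMatTransposeMult(A,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&C);
      MatDestroy(&C);
.ve
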
9699.seealso: MatMatTransposeMultSymbolic(), MatMatTransposeMultNumeric(), MatMatMult(), MatTransposeMatMult() MatPtAP()
9700@*/
9701PetscErrorCode MatMatTransposeMult(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
9702{
9703 PetscErrorCode ierr;
9704 PetscErrorCode (*fA)(Mat,Mat,MatReuse,PetscReal,Mat*);
9705 PetscErrorCode (*fB)(Mat,Mat,MatReuse,PetscReal,Mat*);
9706 Mat T;
9707 PetscBool istrans;
9708
9709 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9709; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9710 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),9710,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9710,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9710,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),9710,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
9711 PetscValidType(A,1)do { if (!((PetscObject)A)->type_name) return PetscError((
(MPI_Comm)0x44000001),9711,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)A)->class_name,1); } while (0)
;
9712 if (scall == MAT_INPLACE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported")return PetscError(PetscObjectComm((PetscObject)A),9712,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Inplace product not supported")
;
9713 if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),9713,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9714 if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)A),9714,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9715 PetscValidHeaderSpecific(B,MAT_CLASSID,2)do { if (!B) return PetscError(((MPI_Comm)0x44000001),9715,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(B,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9715,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(B))->classid != MAT_CLASSID) { if (
((PetscObject)(B))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9715,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),9715,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
9716 PetscValidType(B,2)do { if (!((PetscObject)B)->type_name) return PetscError((
(MPI_Comm)0x44000001),9716,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)B)->class_name,2); } while (0)
;
9717 MatCheckPreallocated(B,2)do { if (__builtin_expect(!!(!(B)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9717,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(2),"B",__func__); } while (0)
;
9718 if (!B->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),9718,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9719 if (B->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)A),9719,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9720 PetscValidPointer(C,3)do { if (!C) return PetscError(((MPI_Comm)0x44000001),9720,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",3); if (!PetscCheckPointer(C,
PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),9720,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",3); } while (0)
;
9721 if (B->cmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, AN %D != BN %D",A->cmap->N,B->cmap->N)return PetscError(PetscObjectComm((PetscObject)A),9721,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Matrix dimensions are incompatible, AN %D != BN %D",A->cmap
->N,B->cmap->N)
;
9722 if (fill == PETSC_DEFAULT-2 || fill == PETSC_DECIDE-1) fill = 2.0;
9723 if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill)return PetscError(PetscObjectComm((PetscObject)A),9723,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Expected fill=%g must be >= 1.0",(double)fill)
;
9724 MatCheckPreallocated(A,1)do { if (__builtin_expect(!!(!(A)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9724,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"A",__func__); } while (0)
;
9725
9726 ierr = PetscObjectTypeCompare((PetscObject)B,MATTRANSPOSEMAT"transpose",&istrans);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9726,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9727 if (istrans) {
9728 ierr = MatTransposeGetMat(B,&T);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9728,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9729 ierr = MatMatMult(A,T,scall,fill,C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9729,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9730 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9731 }
9732 fA = A->ops->mattransposemult;
9733 if (!fA) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MatMatTransposeMult not supported for A of type %s",((PetscObject)A)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9733,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"MatMatTransposeMult not supported for A of type %s",((PetscObject
)A)->type_name)
;
9734 fB = B->ops->mattransposemult;
9735 if (!fB) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MatMatTransposeMult not supported for B of type %s",((PetscObject)B)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9735,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"MatMatTransposeMult not supported for B of type %s",((PetscObject
)B)->type_name)
;
9736 if (fB!=fA) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MatMatTransposeMult requires A, %s, to be compatible with B, %s",((PetscObject)A)->type_name,((PetscObject)B)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9736,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",75,PETSC_ERROR_INITIAL
,"MatMatTransposeMult requires A, %s, to be compatible with B, %s"
,((PetscObject)A)->type_name,((PetscObject)B)->type_name
)
;
9737
9738 ierr = PetscLogEventBegin(MAT_MatTransposeMult,A,B,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTransposeMult].active) ? (*PetscLogPLB)((MAT_MatTransposeMult
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9738,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9739 if (scall == MAT_INITIAL_MATRIX) {
9740 ierr = PetscLogEventBegin(MAT_MatTransposeMultSymbolic,A,B,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTransposeMultSymbolic].active) ? (*PetscLogPLB)((MAT_MatTransposeMultSymbolic
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9740,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9741 ierr = (*A->ops->mattransposemultsymbolic)(A,B,fill,C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9741,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9742 ierr = PetscLogEventEnd(MAT_MatTransposeMultSymbolic,A,B,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTransposeMultSymbolic].active) ? (*PetscLogPLE)((MAT_MatTransposeMultSymbolic
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9742,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9743 }
9744 ierr = PetscLogEventBegin(MAT_MatTransposeMultNumeric,A,B,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTransposeMultNumeric].active) ? (*PetscLogPLB)((MAT_MatTransposeMultNumeric
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9744,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9745 ierr = (*A->ops->mattransposemultnumeric)(A,B,*C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9745,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9746 ierr = PetscLogEventEnd(MAT_MatTransposeMultNumeric,A,B,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTransposeMultNumeric].active) ? (*PetscLogPLE)((MAT_MatTransposeMultNumeric
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9746,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9747 ierr = PetscLogEventEnd(MAT_MatTransposeMult,A,B,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatTransposeMult].active) ? (*PetscLogPLE)((MAT_MatTransposeMult
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9747,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9748 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9749}
9750
9751/*@
9752 MatTransposeMatMult - Performs Matrix-Matrix Multiplication C=A^T*B.
9753
9754 Neighbor-wise Collective on Mat
9755
9756 Input Parameters:
9757+ A - the left matrix
9758. B - the right matrix
9759. scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9760- fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use PETSC_DEFAULT if not known
9761
9762 Output Parameters:
9763. C - the product matrix
9764
9765 Notes:
9766 C will be created if scall is MAT_INITIAL_MATRIX and must be destroyed by the user with MatDestroy().
9767
9768 MAT_REUSE_MATRIX can only be used if the matrices A and B have the same nonzero pattern as in the previous call.
9769
9770 To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
9771 actually needed.
9772
9773 This routine is currently implemented for pairs of AIJ matrices, pairs of SeqDense matrices, and classes
9774 which inherit from SeqAIJ. C will be of the same type as the input matrices.
9775
9776 Level: intermediate
9777
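   Example of Usage (an editorial sketch, not part of the original source; assumes A
   and B are assembled AIJ matrices with the same number of rows):
.vb
      Mat C;
      MatTransposeMatMult(A,B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);   /* C = A^T*B */
      /* ... change the values, but not the nonzero structure, of A or B ... */
      MatTransposeMatMult(A,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&C);
      MatDestroy(&C);
.ve
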
9778.seealso: MatTransposeMatMultSymbolic(), MatTransposeMatMultNumeric(), MatMatMult(), MatMatTransposeMult(), MatPtAP()
9779@*/
9780PetscErrorCode MatTransposeMatMult(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
9781{
9782 PetscErrorCode ierr;
9783 PetscErrorCode (*fA)(Mat,Mat,MatReuse,PetscReal,Mat*);
9784 PetscErrorCode (*fB)(Mat,Mat,MatReuse,PetscReal,Mat*);
9785 PetscErrorCode (*transposematmult)(Mat,Mat,MatReuse,PetscReal,Mat*) = NULL((void*)0);
9786 Mat T;
9787 PetscBool istrans;
9788
9789 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9789; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9790 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),9790,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9790,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9790,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),9790,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
9791 PetscValidType(A,1)do { if (!((PetscObject)A)->type_name) return PetscError((
(MPI_Comm)0x44000001),9791,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)A)->class_name,1); } while (0)
;
9792 if (scall == MAT_INPLACE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported")return PetscError(PetscObjectComm((PetscObject)A),9792,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Inplace product not supported")
;
9793 if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),9793,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9794 if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)A),9794,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9795 PetscValidHeaderSpecific(B,MAT_CLASSID,2)do { if (!B) return PetscError(((MPI_Comm)0x44000001),9795,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(B,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9795,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(B))->classid != MAT_CLASSID) { if (
((PetscObject)(B))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9795,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),9795,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
9796 PetscValidType(B,2)do { if (!((PetscObject)B)->type_name) return PetscError((
(MPI_Comm)0x44000001),9796,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)B)->class_name,2); } while (0)
;
9797 MatCheckPreallocated(B,2)do { if (__builtin_expect(!!(!(B)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9797,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(2),"B",__func__); } while (0)
;
9798 if (!B->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),9798,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9799 if (B->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)A),9799,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9800 PetscValidPointer(C,3)do { if (!C) return PetscError(((MPI_Comm)0x44000001),9800,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",3); if (!PetscCheckPointer(C,
PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),9800,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",3); } while (0)
;
9801 if (B->rmap->N!=A->rmap->N) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",B->rmap->N,A->rmap->N)return PetscError(PetscObjectComm((PetscObject)A),9801,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Matrix dimensions are incompatible, %D != %D",B->rmap->
N,A->rmap->N)
;
9802 if (fill == PETSC_DEFAULT-2 || fill == PETSC_DECIDE-1) fill = 2.0;
9803 if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill)return PetscError(PetscObjectComm((PetscObject)A),9803,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Expected fill=%g must be >= 1.0",(double)fill)
;
9804 MatCheckPreallocated(A,1)do { if (__builtin_expect(!!(!(A)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9804,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"A",__func__); } while (0)
;
9805
9806 ierr = PetscObjectTypeCompare((PetscObject)A,MATTRANSPOSEMAT"transpose",&istrans);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9806,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9807 if (istrans) {
9808 ierr = MatTransposeGetMat(A,&T);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9808,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9809 ierr = MatMatMult(T,B,scall,fill,C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9809,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9810 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9811 }
9812 fA = A->ops->transposematmult;
9813 fB = B->ops->transposematmult;
9814 if (fB==fA) {
9815 if (!fA) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MatTransposeMatMult not supported for A of type %s",((PetscObject)A)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9815,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"MatTransposeMatMult not supported for A of type %s",((PetscObject
)A)->type_name)
;
9816 transposematmult = fA;
9817 } else {
9818 /* dispatch based on the type of A and B from their PetscObject's PetscFunctionLists. */
9819 char multname[256];
9820 ierr = PetscStrncpy(multname,"MatTransposeMatMult_",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9820,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9821 ierr = PetscStrlcat(multname,((PetscObject)A)->type_name,sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9821,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9822 ierr = PetscStrlcat(multname,"_",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9822,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9823 ierr = PetscStrlcat(multname,((PetscObject)B)->type_name,sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9823,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9824 ierr = PetscStrlcat(multname,"_C",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9824,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
; /* e.g., multname = "MatTransposeMatMult_seqdense_seqaij_C" */
9825 ierr = PetscObjectQueryFunction((PetscObject)B,multname,&transposematmult)PetscObjectQueryFunction_Private(((PetscObject)B),(multname),
(PetscVoidFunction*)(&transposematmult))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9825,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9826 if (!transposematmult) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MatTransposeMatMult requires A, %s, to be compatible with B, %s",((PetscObject)A)->type_name,((PetscObject)B)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9826,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",75,PETSC_ERROR_INITIAL
,"MatTransposeMatMult requires A, %s, to be compatible with B, %s"
,((PetscObject)A)->type_name,((PetscObject)B)->type_name
)
;
9827 }
9828 ierr = PetscLogEventBegin(MAT_TransposeMatMult,A,B,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_TransposeMatMult].active) ? (*PetscLogPLB)((MAT_TransposeMatMult
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9828,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9829 ierr = (*transposematmult)(A,B,scall,fill,C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9829,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9830 ierr = PetscLogEventEnd(MAT_TransposeMatMult,A,B,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_TransposeMatMult].active) ? (*PetscLogPLE)((MAT_TransposeMatMult
),0,(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9830,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9831 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9832}
9833
9834/*@
9835 MatMatMatMult - Performs Matrix-Matrix-Matrix Multiplication D=A*B*C.
9836
9837 Neighbor-wise Collective on Mat
9838
9839 Input Parameters:
9840+ A - the left matrix
9841. B - the middle matrix
9842. C - the right matrix
9843. scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9844- fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B) + nnz(C)); use PETSC_DEFAULT if you do not have a good estimate.
9845 If the result is a dense matrix this is irrelevant.
9846
9847 Output Parameters:
9848. D - the product matrix
9849
9850 Notes:
9851 Unless scall is MAT_REUSE_MATRIX, D will be created.
9852
9853 MAT_REUSE_MATRIX can only be used if the matrices A, B and C have the same nonzero pattern as in the previous call.
9854
9855 To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
9856 actually needed.
9857
9858 If you have many matrices with the same nonzero structure to multiply, you
9859 should use MAT_REUSE_MATRIX in all calls but the first.
9860
9861 Level: intermediate
9862
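   Example of Usage (an editorial sketch, not part of the original source; assumes A,
   B, and C are assembled AIJ matrices with conforming dimensions):
.vb
      Mat D;
      MatMatMatMult(A,B,C,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&D);   /* D = A*B*C */
      /* ... change the values, but not the nonzero structure, of A, B, or C ... */
      MatMatMatMult(A,B,C,MAT_REUSE_MATRIX,PETSC_DEFAULT,&D);
      MatDestroy(&D);
.ve
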
9863.seealso: MatMatMult(), MatPtAP()
9864@*/
9865PetscErrorCode MatMatMatMult(Mat A,Mat B,Mat C,MatReuse scall,PetscReal fill,Mat *D)
9866{
9867 PetscErrorCode ierr;
9868 PetscErrorCode (*fA)(Mat,Mat,Mat,MatReuse,PetscReal,Mat*);
9869 PetscErrorCode (*fB)(Mat,Mat,Mat,MatReuse,PetscReal,Mat*);
9870 PetscErrorCode (*fC)(Mat,Mat,Mat,MatReuse,PetscReal,Mat*);
9871 PetscErrorCode (*mult)(Mat,Mat,Mat,MatReuse,PetscReal,Mat*)=NULL((void*)0);
9872
9873 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9873; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9874 PetscValidHeaderSpecific(A,MAT_CLASSID,1)do { if (!A) return PetscError(((MPI_Comm)0x44000001),9874,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",1); if (!PetscCheckPointer(A,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9874,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(A))->classid != MAT_CLASSID) { if (
((PetscObject)(A))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9874,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),9874,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
9875 PetscValidType(A,1)do { if (!((PetscObject)A)->type_name) return PetscError((
(MPI_Comm)0x44000001),9875,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)A)->class_name,1); } while (0)
;
9876 MatCheckPreallocated(A,1)do { if (__builtin_expect(!!(!(A)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9876,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"A",__func__); } while (0)
;
9877 if (scall == MAT_INPLACE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported")return PetscError(PetscObjectComm((PetscObject)A),9877,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"Inplace product not supported")
;
9878 if (!A->assembled) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)A),9878,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9879 if (A->factortype) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)A),9879,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9880 PetscValidHeaderSpecific(B,MAT_CLASSID,2)do { if (!B) return PetscError(((MPI_Comm)0x44000001),9880,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",2); if (!PetscCheckPointer(B,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9880,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(B))->classid != MAT_CLASSID) { if (
((PetscObject)(B))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9880,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),9880,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
9881 PetscValidType(B,2)do { if (!((PetscObject)B)->type_name) return PetscError((
(MPI_Comm)0x44000001),9881,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)B)->class_name,2); } while (0)
;
9882 MatCheckPreallocated(B,2)do { if (__builtin_expect(!!(!(B)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9882,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(2),"B",__func__); } while (0)
;
9883 if (!B->assembled) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)B),9883,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9884 if (B->factortype) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)B),9884,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9885 PetscValidHeaderSpecific(C,MAT_CLASSID,3)do { if (!C) return PetscError(((MPI_Comm)0x44000001),9885,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",3); if (!PetscCheckPointer(C,PETSC_OBJECT
)) return PetscError(((MPI_Comm)0x44000001),9885,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(C))->classid != MAT_CLASSID) { if (
((PetscObject)(C))->classid == -1) return PetscError(((MPI_Comm
)0x44000001),9885,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),9885,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
9886 PetscValidPointer(C,3)do { if (!C) return PetscError(((MPI_Comm)0x44000001),9886,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",3); if (!PetscCheckPointer(C,
PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),9886,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",3); } while (0)
;
9887 MatCheckPreallocated(C,3)do { if (__builtin_expect(!!(!(C)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9887,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(3),"C",__func__); } while (0)
;
9888 if (!C->assembled) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)C),9888,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9889 if (C->factortype) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)C),9889,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9890 if (B->rmap->N!=A->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",B->rmap->N,A->cmap->N)return PetscError(PetscObjectComm((PetscObject)B),9890,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Matrix dimensions are incompatible, %D != %D",B->rmap->
N,A->cmap->N)
;
9891 if (C->rmap->N!=B->cmap->N) SETERRQ2(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_SIZ,"Matrix dimensions are incompatible, %D != %D",C->rmap->N,B->cmap->N)return PetscError(PetscObjectComm((PetscObject)C),9891,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Matrix dimensions are incompatible, %D != %D",C->rmap->
N,B->cmap->N)
;
9892 if (scall == MAT_REUSE_MATRIX) {
9893 PetscValidPointer(*D,6)do { if (!*D) return PetscError(((MPI_Comm)0x44000001),9893,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Pointer: Parameter # %d",6); if (!PetscCheckPointer(*D
,PETSC_CHAR)) return PetscError(((MPI_Comm)0x44000001),9893,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",68,PETSC_ERROR_INITIAL
,"Invalid Pointer: Parameter # %d",6); } while (0)
;
9894 PetscValidHeaderSpecific(*D,MAT_CLASSID,6)do { if (!*D) return PetscError(((MPI_Comm)0x44000001),9894,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",85,PETSC_ERROR_INITIAL
,"Null Object: Parameter # %d",6); if (!PetscCheckPointer(*D,
PETSC_OBJECT)) return PetscError(((MPI_Comm)0x44000001),9894,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,6); if (((PetscObject)(*D))->classid != MAT_CLASSID) { if
(((PetscObject)(*D))->classid == -1) return PetscError(((
MPI_Comm)0x44000001),9894,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,6); else return PetscError(((MPI_Comm)0x44000001),9894,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",6); } } while (0)
;
9895 ierr = PetscLogEventBegin(MAT_MatMatMult,A,B,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatMatMult].active) ? (*PetscLogPLB)((MAT_MatMatMult),0,
(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9895,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9896 ierr = (*(*D)->ops->matmatmult)(A,B,C,scall,fill,D);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9896,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9897 ierr = PetscLogEventEnd(MAT_MatMatMult,A,B,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatMatMult].active) ? (*PetscLogPLE)((MAT_MatMatMult),0,
(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9897,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9898 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9899 }
9900 if (fill == PETSC_DEFAULT-2 || fill == PETSC_DECIDE-1) fill = 2.0;
9901 if (fill < 1.0) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Expected fill=%g must be >= 1.0",(double)fill)return PetscError(PetscObjectComm((PetscObject)A),9901,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",60,PETSC_ERROR_INITIAL
,"Expected fill=%g must be >= 1.0",(double)fill)
;
9902
9903 fA = A->ops->matmatmult;
9904 fB = B->ops->matmatmult;
9905 fC = C->ops->matmatmult;
9906 if (fA == fB && fA == fC) {
9907 if (!fA) SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MatMatMatMult not supported for A of type %s",((PetscObject)A)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9907,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",56,PETSC_ERROR_INITIAL
,"MatMatMatMult not supported for A of type %s",((PetscObject
)A)->type_name)
;
9908 mult = fA;
9909 } else {
9910 /* dispatch based on the type of A, B and C from their PetscObject's PetscFunctionLists. */
9911 char multname[256];
9912 ierr = PetscStrncpy(multname,"MatMatMatMult_",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9912,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9913 ierr = PetscStrlcat(multname,((PetscObject)A)->type_name,sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9913,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9914 ierr = PetscStrlcat(multname,"_",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9914,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9915 ierr = PetscStrlcat(multname,((PetscObject)B)->type_name,sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9915,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9916 ierr = PetscStrlcat(multname,"_",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9916,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9917 ierr = PetscStrlcat(multname,((PetscObject)C)->type_name,sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9917,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9918 ierr = PetscStrlcat(multname,"_C",sizeof(multname));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9918,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9919 ierr = PetscObjectQueryFunction((PetscObject)B,multname,&mult)PetscObjectQueryFunction_Private(((PetscObject)B),(multname),
(PetscVoidFunction*)(&mult))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9919,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9920 if (!mult) SETERRQ3(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MatMatMatMult requires A, %s, to be compatible with B, %s, C, %s",((PetscObject)A)->type_name,((PetscObject)B)->type_name,((PetscObject)C)->type_name)return PetscError(PetscObjectComm((PetscObject)A),9920,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",75,PETSC_ERROR_INITIAL
,"MatMatMatMult requires A, %s, to be compatible with B, %s, C, %s"
,((PetscObject)A)->type_name,((PetscObject)B)->type_name
,((PetscObject)C)->type_name)
;
9921 }
9922 ierr = PetscLogEventBegin(MAT_MatMatMult,A,B,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatMatMult].active) ? (*PetscLogPLB)((MAT_MatMatMult),0,
(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9922,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9923 ierr = (*mult)(A,B,C,scall,fill,D);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9923,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9924 ierr = PetscLogEventEnd(MAT_MatMatMult,A,B,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_MatMatMult].active) ? (*PetscLogPLE)((MAT_MatMatMult),0,
(PetscObject)(A),(PetscObject)(B),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9924,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9925 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9926}
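
 A minimal usage sketch of the routine above (not from the library sources; it assumes A, B and C are assembled matrices with compatible dimensions and ierr is a declared PetscErrorCode):
.vb
   Mat D;
   ierr = MatMatMatMult(A,B,C,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&D);CHKERRQ(ierr); /* first call: symbolic + numeric product D = A*B*C */
   ierr = MatMatMatMult(A,B,C,MAT_REUSE_MATRIX,PETSC_DEFAULT,&D);CHKERRQ(ierr);   /* recompute after the values of A, B, C change */
   ierr = MatDestroy(&D);CHKERRQ(ierr);
.ve
 With MAT_REUSE_MATRIX the code above dispatches through (*D)->ops->matmatmult, so the nonzero structure of A, B and C must be unchanged since the MAT_INITIAL_MATRIX call.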
9927
9928/*@
9929 MatCreateRedundantMatrix - Creates redundant copies of a matrix and places them on the processes of subcommunicators.
9930
9931 Collective on Mat
9932
9933 Input Parameters:
9934+ mat - the matrix
9935. nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
9936. subcomm - MPI communicator split from the communicator in which mat resides (or MPI_COMM_NULL if nsubcomm is to be used instead)
9937- reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9938
9939 Output Parameter:
9940. matredundant - redundant matrix
9941
9942 Notes:
9943 MAT_REUSE_MATRIX can only be used when the nonzero structure of the
9944 original matrix has not changed since the last call to MatCreateRedundantMatrix().
9945
9946 This routine creates the duplicated matrices in subcommunicators; you should NOT create them before
9947 calling it.
9948
9949 Level: advanced
9950
9951
9952.seealso: MatDestroy()
9953@*/
9954PetscErrorCode MatCreateRedundantMatrix(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,MatReuse reuse,Mat *matredundant)
9955{
9956 PetscErrorCode ierr;
9957 MPI_Comm comm;
9958 PetscMPIInt size;
9959 PetscInt mloc_sub,nloc_sub,rstart,rend,M=mat->rmap->N,N=mat->cmap->N,bs=mat->rmap->bs;
9960 Mat_Redundant *redund=NULL((void*)0);
9961 PetscSubcomm psubcomm=NULL((void*)0);
9962 MPI_Comm subcomm_in=subcomm;
9963 Mat *matseq;
9964 IS isrow,iscol;
9965 PetscBool newsubcomm=PETSC_FALSE;
9966
9967 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 9967; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
9968 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),9968,
__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),9968,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),9968,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),9968,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
9969 if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
9970 PetscValidPointer(*matredundant,5)do { if (!*matredundant) return PetscError(((MPI_Comm)0x44000001
),9970,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",5); if
(!PetscCheckPointer(*matredundant,PETSC_CHAR)) return PetscError
(((MPI_Comm)0x44000001),9970,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",5);
} while (0)
;
9971 PetscValidHeaderSpecific(*matredundant,MAT_CLASSID,5)do { if (!*matredundant) return PetscError(((MPI_Comm)0x44000001
),9971,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",5); if (
!PetscCheckPointer(*matredundant,PETSC_OBJECT)) return PetscError
(((MPI_Comm)0x44000001),9971,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,5); if (((PetscObject)(*matredundant))->classid != MAT_CLASSID
) { if (((PetscObject)(*matredundant))->classid == -1) return
PetscError(((MPI_Comm)0x44000001),9971,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,5); else return PetscError(((MPI_Comm)0x44000001),9971,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",5); } } while (0)
;
9972 }
9973
9974 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9974,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9975 if (size == 1 || nsubcomm == 1) {
9976 if (reuse == MAT_INITIAL_MATRIX) {
9977 ierr = MatDuplicate(mat,MAT_COPY_VALUES,matredundant);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9977,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9978 } else {
9979 if (*matredundant == mat) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix")return PetscError(((MPI_Comm)0x44000001),9979,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,62,PETSC_ERROR_INITIAL,"MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix"
)
;
9980 ierr = MatCopy(mat,*matredundant,SAME_NONZERO_PATTERN);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9980,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9981 }
9982 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
9983 }
9984
9985 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),9985,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
9986 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),9986,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
9987 MatCheckPreallocated(mat,1)do { if (__builtin_expect(!!(!(mat)->preallocated),0)) return
PetscError(((MPI_Comm)0x44000001),9987,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"Must call MatXXXSetPreallocation() or MatSetUp() on argument %D \"%s\" before %s()"
,(1),"mat",__func__); } while (0)
;
9988
9989 ierr = PetscLogEventBegin(MAT_RedundantMat,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_RedundantMat].active) ? (*PetscLogPLB)((MAT_RedundantMat
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9989,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9990 if (subcomm_in == MPI_COMM_NULL((MPI_Comm)0x04000000) && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
9991 /* create psubcomm, then get subcomm */
9992 ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9992,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9993 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9993,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9994 if (nsubcomm < 1 || nsubcomm > size) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"nsubcomm must be between 1 and %D",size)return PetscError(((MPI_Comm)0x44000001),9994,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,60,PETSC_ERROR_INITIAL,"nsubcomm must be between 1 and %D",size
)
;
9995
9996 ierr = PetscSubcommCreate(comm,&psubcomm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9996,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9997 ierr = PetscSubcommSetNumber(psubcomm,nsubcomm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9997,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9998 ierr = PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_CONTIGUOUS);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9998,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
9999 ierr = PetscSubcommSetFromOptions(psubcomm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),9999,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10000 ierr = PetscCommDuplicate(PetscSubcommChild(psubcomm),&subcomm,NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10000,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10001 newsubcomm = PETSC_TRUE;
10002 ierr = PetscSubcommDestroy(&psubcomm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10002,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10003 }
10004
10005 /* get isrow, iscol and a local sequential matrix matseq[0] */
10006 if (reuse == MAT_INITIAL_MATRIX) {
10007 mloc_sub = PETSC_DECIDE-1;
10008 nloc_sub = PETSC_DECIDE-1;
10009 if (bs < 1) {
10010 ierr = PetscSplitOwnership(subcomm,&mloc_sub,&M);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10010,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10011 ierr = PetscSplitOwnership(subcomm,&nloc_sub,&N);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10011,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10012 } else {
10013 ierr = PetscSplitOwnershipBlock(subcomm,bs,&mloc_sub,&M);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10013,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10014 ierr = PetscSplitOwnershipBlock(subcomm,bs,&nloc_sub,&N);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10014,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10015 }
10016 ierr = MPI_Scan(&mloc_sub,&rend,1,MPIU_INT((MPI_Datatype)0x4c000405),MPI_SUM(MPI_Op)(0x58000003),subcomm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10016,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10017 rstart = rend - mloc_sub;
10018 ierr = ISCreateStride(PETSC_COMM_SELF((MPI_Comm)0x44000001),mloc_sub,rstart,1,&isrow);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10018,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10019 ierr = ISCreateStride(PETSC_COMM_SELF((MPI_Comm)0x44000001),N,0,1,&iscol);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10019,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10020 } else { /* reuse == MAT_REUSE_MATRIX */
10021 if (*matredundant == mat) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix")return PetscError(((MPI_Comm)0x44000001),10021,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,62,PETSC_ERROR_INITIAL,"MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix"
)
;
10022 /* retrieve subcomm */
10023 ierr = PetscObjectGetComm((PetscObject)(*matredundant),&subcomm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10023,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10024 redund = (*matredundant)->redundant;
10025 isrow = redund->isrow;
10026 iscol = redund->iscol;
10027 matseq = redund->matseq;
10028 }
10029 ierr = MatCreateSubMatrices(mat,1,&isrow,&iscol,reuse,&matseq);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10029,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10030
10031 /* get matredundant over subcomm */
10032 if (reuse == MAT_INITIAL_MATRIX) {
10033 ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,matseq[0],nloc_sub,reuse,matredundant);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10033,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10034
10035 /* create a supporting struct and attach it to C for reuse */
10036 ierr = PetscNewLog(*matredundant,&redund)(PetscMallocA(1,PETSC_TRUE,10036,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,(size_t)(1)*sizeof(**(((&redund)))),(((&redund)))) ||
PetscLogObjectMemory((PetscObject)*matredundant,sizeof(**(&
redund))))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10036,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10037 (*matredundant)->redundant = redund;
10038 redund->isrow = isrow;
10039 redund->iscol = iscol;
10040 redund->matseq = matseq;
10041 if (newsubcomm) {
10042 redund->subcomm = subcomm;
10043 } else {
10044 redund->subcomm = MPI_COMM_NULL((MPI_Comm)0x04000000);
10045 }
10046 } else {
10047 ierr = MatCreateMPIMatConcatenateSeqMat(subcomm,matseq[0],PETSC_DECIDE-1,reuse,matredundant);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10047,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10048 }
10049 ierr = PetscLogEventEnd(MAT_RedundantMat,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_RedundantMat].active) ? (*PetscLogPLE)((MAT_RedundantMat
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10049,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10050 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
10051}
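
 A minimal usage sketch (an illustration, not library code; it assumes mat is an assembled parallel matrix). Passing MPI_COMM_NULL lets the routine split the communicator itself, via the PetscSubcommCreate() branch above:
.vb
   Mat matred;
   ierr = MatCreateRedundantMatrix(mat,nsubcomm,MPI_COMM_NULL,MAT_INITIAL_MATRIX,&matred);CHKERRQ(ierr);
   /* each of the nsubcomm subcommunicators now owns a full copy of mat */
   ierr = MatCreateRedundantMatrix(mat,nsubcomm,MPI_COMM_NULL,MAT_REUSE_MATRIX,&matred);CHKERRQ(ierr); /* refresh values; nonzero pattern must be unchanged */
   ierr = MatDestroy(&matred);CHKERRQ(ierr);
.ve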
10052
10053/*@C
10054 MatGetMultiProcBlock - Creates multiple [bjacobi] 'parallel submatrices' from
10055 a given 'mat' object. Each submatrix can span multiple processes.
10056
10057 Collective on Mat
10058
10059 Input Parameters:
10060+ mat - the matrix
10061. subcomm - the subcommunicator obtained by MPI_Comm_split(comm)
10062- scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
10063
10064 Output Parameter:
10065. subMat - the 'parallel submatrix' spanning a given subComm
10066
10067 Notes:
10068 The submatrix partition across processors is dictated by 'subComm', a
10069 communicator obtained by MPI_Comm_split(comm). The split
10070 is not restricted to groups of consecutive original ranks.
10071
10072 Due to the comm_split() usage, the parallel layout of the submatrices
10073 maps directly to the layout of the original matrix [wrt the local
10074 row,col partitioning]. So the original 'DiagonalMat' naturally maps
10075 into the 'DiagonalMat' of the subMat, hence it is used directly from
10076 the subMat. However, the offDiagMat loses some columns, and these are
10077 reconstructed with MatSetValues().
10078
10079 Level: advanced
10080
10081
10082.seealso: MatCreateSubMatrices()
10083@*/
10084PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall,Mat *subMat)
10085{
10086 PetscErrorCode ierr;
10087 PetscMPIInt commsize,subCommSize;
10088
10089 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 10089; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
10090 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&commsize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10090,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10091 ierr = MPI_Comm_size(subComm,&subCommSize);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10091,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10092 if (subCommSize > commsize) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"CommSize %D < SubCommSize %D",commsize,subCommSize)return PetscError(PetscObjectComm((PetscObject)mat),10092,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",63,PETSC_ERROR_INITIAL
,"CommSize %D < SubCommSize %D",commsize,subCommSize)
;
10093
10094 if (scall == MAT_REUSE_MATRIX && *subMat == mat) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix")return PetscError(((MPI_Comm)0x44000001),10094,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,62,PETSC_ERROR_INITIAL,"MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix"
)
;
10095 ierr = PetscLogEventBegin(MAT_GetMultiProcBlock,mat,0,0,0)(((PetscLogPLB && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_GetMultiProcBlock].active) ? (*PetscLogPLB)((MAT_GetMultiProcBlock
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10095,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10096 ierr = (*mat->ops->getmultiprocblock)(mat,subComm,scall,subMat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10096,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10097 ierr = PetscLogEventEnd(MAT_GetMultiProcBlock,mat,0,0,0)(((PetscLogPLE && petsc_stageLog->stageInfo[petsc_stageLog
->curStage].perfInfo.active && petsc_stageLog->
stageInfo[petsc_stageLog->curStage].eventLog->eventInfo
[MAT_GetMultiProcBlock].active) ? (*PetscLogPLE)((MAT_GetMultiProcBlock
),0,(PetscObject)(mat),(PetscObject)(0),(PetscObject)(0),(PetscObject
)(0)) : 0 ))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10097,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10098 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
10099}
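
 A minimal usage sketch (illustrative only; the color rank/2, which groups pairs of consecutive ranks, is an arbitrary choice for MPI_Comm_split()):
.vb
   MPI_Comm    subComm;
   PetscMPIInt rank;
   Mat         subMat;
   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&rank);CHKERRQ(ierr);
   ierr = MPI_Comm_split(PetscObjectComm((PetscObject)mat),rank/2,rank,&subComm);CHKERRQ(ierr);
   ierr = MatGetMultiProcBlock(mat,subComm,MAT_INITIAL_MATRIX,&subMat);CHKERRQ(ierr);
   /* ... work with subMat on each subcommunicator ... */
   ierr = MatDestroy(&subMat);CHKERRQ(ierr);
   ierr = MPI_Comm_free(&subComm);CHKERRQ(ierr);
.ve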
10100
10101/*@
10102 MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10103
10104 Not Collective
10105
10106 Input Parameters:
10107+ mat - matrix to extract local submatrix from
10108. isrow - local row indices for submatrix
10109- iscol - local column indices for submatrix
10110
10111 Output Parameter:
10112. submat - the submatrix
10113
10114 Level: intermediate
10115
10116 Notes:
10117 The submat should be returned with MatRestoreLocalSubMatrix().
10118
10119 Depending on the format of mat, the returned submat may not implement MatMult(). Its communicator may be
10120 the same as mat's, it may be PETSC_COMM_SELF, or it may be some other subcommunicator of mat's communicator.
10121
10122 The submat always implements MatSetValuesLocal(). If isrow and iscol have the same block size, then
10123 MatSetValuesBlockedLocal() will also be implemented.
10124
10125 The mat must have had an ISLocalToGlobalMapping provided to it with MatSetLocalToGlobalMapping(). Note that
10126 matrices obtained with DMCreateMatrix() generally already have the local-to-global mapping provided.
10127
10128.seealso: MatRestoreLocalSubMatrix(), MatCreateLocalRef(), MatSetLocalToGlobalMapping()
10129@*/
10130PetscErrorCode MatGetLocalSubMatrix(Mat mat,IS isrow,IS iscol,Mat *submat)
10131{
10132 PetscErrorCode ierr;
10133
10134 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 10134; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
10135 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),10135
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),10135,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),10135,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),10135,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
10136 PetscValidHeaderSpecific(isrow,IS_CLASSID,2)do { if (!isrow) return PetscError(((MPI_Comm)0x44000001),10136
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",2); if (
!PetscCheckPointer(isrow,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),10136,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(isrow))->classid != IS_CLASSID) { if
(((PetscObject)(isrow))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),10136,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),10136,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
10137 PetscValidHeaderSpecific(iscol,IS_CLASSID,3)do { if (!iscol) return PetscError(((MPI_Comm)0x44000001),10137
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(iscol,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),10137,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(iscol))->classid != IS_CLASSID) { if
(((PetscObject)(iscol))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),10137,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),10137,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
10138 PetscCheckSameComm(isrow,2,iscol,3)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)isrow),PetscObjectComm((PetscObject
)iscol),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0)
) return PetscError(((MPI_Comm)0x44000001),10138,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),10138,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,2,3,_7_flag); } while (0)
;
10139 PetscValidPointer(submat,4)do { if (!submat) return PetscError(((MPI_Comm)0x44000001),10139
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",4); if
(!PetscCheckPointer(submat,PETSC_CHAR)) return PetscError(((
MPI_Comm)0x44000001),10139,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",4);
} while (0)
;
10140 if (!mat->rmap->mapping) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Matrix must have local to global mapping provided before this call")return PetscError(PetscObjectComm((PetscObject)mat),10140,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Matrix must have local to global mapping provided before this call"
)
;
10141
10142 if (mat->ops->getlocalsubmatrix) {
10143 ierr = (*mat->ops->getlocalsubmatrix)(mat,isrow,iscol,submat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10143,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10144 } else {
10145 ierr = MatCreateLocalRef(mat,isrow,iscol,submat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10145,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10146 }
10147 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
10148}
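
 A minimal usage sketch (assumes mat has a local-to-global mapping set via MatSetLocalToGlobalMapping() and that isrow, iscol are index sets in local numbering; the indices 0,0 and the value 1.0 are placeholders):
.vb
   Mat         sub;
   PetscInt    row = 0, col = 0;   /* indices local to the submatrix */
   PetscScalar v   = 1.0;
   ierr = MatGetLocalSubMatrix(mat,isrow,iscol,&sub);CHKERRQ(ierr);
   ierr = MatSetValuesLocal(sub,1,&row,1,&col,&v,ADD_VALUES);CHKERRQ(ierr);
   ierr = MatRestoreLocalSubMatrix(mat,isrow,iscol,&sub);CHKERRQ(ierr); /* defined below */
.ve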
10149
10150/*@
10151 MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering
10152
10153 Not Collective
10154
10155 Input Parameters:
10156+ mat - matrix to extract local submatrix from
10157. isrow - local row indices for submatrix
10158. iscol - local column indices for submatrix
10159- submat - the submatrix
10160
10161 Level: intermediate
10162
10163.seealso: MatGetLocalSubMatrix()
10164@*/
10165PetscErrorCode MatRestoreLocalSubMatrix(Mat mat,IS isrow,IS iscol,Mat *submat)
10166{
10167 PetscErrorCode ierr;
10168
10169 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 10169; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
10170 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),10170
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),10170,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),10170,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),10170,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
10171 PetscValidHeaderSpecific(isrow,IS_CLASSID,2)do { if (!isrow) return PetscError(((MPI_Comm)0x44000001),10171
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",2); if (
!PetscCheckPointer(isrow,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),10171,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,2); if (((PetscObject)(isrow))->classid != IS_CLASSID) { if
(((PetscObject)(isrow))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),10171,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,2); else return PetscError(((MPI_Comm)0x44000001),10171,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",2); } } while (0)
;
10172 PetscValidHeaderSpecific(iscol,IS_CLASSID,3)do { if (!iscol) return PetscError(((MPI_Comm)0x44000001),10172
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",3); if (
!PetscCheckPointer(iscol,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),10172,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,3); if (((PetscObject)(iscol))->classid != IS_CLASSID) { if
(((PetscObject)(iscol))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),10172,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,3); else return PetscError(((MPI_Comm)0x44000001),10172,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",3); } } while (0)
;
10173 PetscCheckSameComm(isrow,2,iscol,3)do { PetscErrorCode _7_ierr; PetscMPIInt _7_flag; _7_ierr = MPI_Comm_compare
(PetscObjectComm((PetscObject)isrow),PetscObjectComm((PetscObject
)iscol),&_7_flag);do {if (__builtin_expect(!!(_7_ierr),0)
) return PetscError(((MPI_Comm)0x44000001),10173,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,_7_ierr,PETSC_ERROR_REPEAT," ");} while (0); if (_7_flag != 1
&& _7_flag != 0) return PetscError(((MPI_Comm)0x44000001
),10173,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,80,PETSC_ERROR_INITIAL,"Different communicators in the two objects: Argument # %d and %d flag %d"
,2,3,_7_flag); } while (0)
;
10174 PetscValidPointer(submat,4)do { if (!submat) return PetscError(((MPI_Comm)0x44000001),10174
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Pointer: Parameter # %d",4); if
(!PetscCheckPointer(submat,PETSC_CHAR)) return PetscError(((
MPI_Comm)0x44000001),10174,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,68,PETSC_ERROR_INITIAL,"Invalid Pointer: Parameter # %d",4);
} while (0)
;
10175 if (*submat) {
10176 PetscValidHeaderSpecific(*submat,MAT_CLASSID,4)do { if (!*submat) return PetscError(((MPI_Comm)0x44000001),10176
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",4); if (
!PetscCheckPointer(*submat,PETSC_OBJECT)) return PetscError((
(MPI_Comm)0x44000001),10176,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,4); if (((PetscObject)(*submat))->classid != MAT_CLASSID)
{ if (((PetscObject)(*submat))->classid == -1) return PetscError
(((MPI_Comm)0x44000001),10176,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,4); else return PetscError(((MPI_Comm)0x44000001),10176,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",4); } } while (0)
;
10177 }
10178
10179 if (mat->ops->restorelocalsubmatrix) {
10180 ierr = (*mat->ops->restorelocalsubmatrix)(mat,isrow,iscol,submat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10180,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10181 } else {
10182 ierr = MatDestroy(submat);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10182,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10183 }
10184 *submat = NULL((void*)0);
10185 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
10186}
10187
10188/* --------------------------------------------------------*/
10189/*@
10190 MatFindZeroDiagonals - Finds all the rows of a matrix whose diagonal entry is zero or absent
10191
10192 Collective on Mat
10193
10194 Input Parameter:
10195. mat - the matrix
10196
10197 Output Parameter:
10198. is - the list of rows (possibly empty) whose diagonal entry is zero or missing
10199
10200 Level: developer
10201
10202.seealso: MatMultTranspose(), MatMultAdd(), MatMultTransposeAdd()
10203@*/
10204PetscErrorCode MatFindZeroDiagonals(Mat mat,IS *is)
10205{
10206 PetscErrorCode ierr;
10207
10208 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"; petscstack
->line[petscstack->currentsize] = 10208; petscstack->
petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
10209 PetscValidHeaderSpecific(mat,MAT_CLASSID,1)do { if (!mat) return PetscError(((MPI_Comm)0x44000001),10209
,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,85,PETSC_ERROR_INITIAL,"Null Object: Parameter # %d",1); if (
!PetscCheckPointer(mat,PETSC_OBJECT)) return PetscError(((MPI_Comm
)0x44000001),10209,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Invalid Pointer to Object: Parameter # %d"
,1); if (((PetscObject)(mat))->classid != MAT_CLASSID) { if
(((PetscObject)(mat))->classid == -1) return PetscError((
(MPI_Comm)0x44000001),10209,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,64,PETSC_ERROR_INITIAL,"Object already free: Parameter # %d"
,1); else return PetscError(((MPI_Comm)0x44000001),10209,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",62,PETSC_ERROR_INITIAL
,"Wrong type of object: Parameter # %d",1); } } while (0)
;
10210 PetscValidType(mat,1)do { if (!((PetscObject)mat)->type_name) return PetscError
(((MPI_Comm)0x44000001),10210,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,73,PETSC_ERROR_INITIAL,"%s object's type is not set: Argument # %d"
,((PetscObject)mat)->class_name,1); } while (0)
;
10211 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix")return PetscError(PetscObjectComm((PetscObject)mat),10211,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for unassembled matrix")
;
10212 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix")return PetscError(PetscObjectComm((PetscObject)mat),10212,__func__
,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c",73,PETSC_ERROR_INITIAL
,"Not for factored matrix")
;
10213
10214 if (!mat->ops->findzerodiagonals) {
10215 Vec diag;
10216 const PetscScalar *a;
10217 PetscInt *rows;
10218 PetscInt rStart, rEnd, r, nrow = 0;
10219
10220 ierr = MatCreateVecs(mat, &diag, NULL((void*)0));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10220,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10221 ierr = MatGetDiagonal(mat, diag);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10221,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10222 ierr = MatGetOwnershipRange(mat, &rStart, &rEnd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10222,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10223 ierr = VecGetArrayRead(diag, &a);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10223,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10224 for (r = 0; r < rEnd-rStart; ++r) if (a[r] == 0.0) ++nrow;
10225 ierr = PetscMalloc1(nrow, &rows)PetscMallocA(1,PETSC_FALSE,10225,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,(size_t)(nrow)*sizeof(**(&rows)),(&rows))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10225,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10226 nrow = 0;
10227 for (r = 0; r < rEnd-rStart; ++r) if (a[r] == 0.0) rows[nrow++] = r+rStart;
10228 ierr = VecRestoreArrayRead(diag, &a);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10228,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10229 ierr = VecDestroy(&diag);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10229,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10230 ierr = ISCreateGeneral(PetscObjectComm((PetscObject) mat), nrow, rows, PETSC_OWN_POINTER, is);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10230,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10231 } else {
10232 ierr = (*mat->ops->findzerodiagonals)(mat, is);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),10232,__func__,"/sandbox/petsc/petsc.next/src/mat/interface/matrix.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
10233 }
10234 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
10235}
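
 A minimal usage sketch (assumes A is an assembled matrix):
.vb
   IS       zerorows;
   PetscInt n;
   ierr = MatFindZeroDiagonals(A,&zerorows);CHKERRQ(ierr);
   ierr = ISGetSize(zerorows,&n);CHKERRQ(ierr);   /* global count of zero-diagonal rows */
   if (n) {ierr = ISView(zerorows,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}
   ierr = ISDestroy(&zerorows);CHKERRQ(ierr);
.ve
 Note that the fallback path above compares a[r] == 0.0 exactly, so diagonal entries that are merely small are not reported.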
10236
10237/*@
10238 MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10239
10240 Collective on Mat
10241
10242 Input Parameter:
10243. mat - the matrix
10244
10245 Output Parameter:
10246. is - contains the list of rows with off block diagonal entries
10247
10248 Level: developer
10249
10250.seealso: MatMultTranspose(), MatMultAdd(), MatMultTransposeAdd()
10251@*/
10252PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat,IS *is)
10253{
10254 PetscErrorCode ierr;
10255
10256 PetscFunctionBegin;
10257 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10258 PetscValidType(mat,1);
10259 if (!mat->assembled) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
10260 if (mat->factortype) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
10261
10262 if (!mat->ops->findoffblockdiagonalentries) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"This matrix type does not have a find off block diagonal entries defined");
10263 ierr = (*mat->ops->findoffblockdiagonalentries)(mat,is);CHKERRQ(ierr);
10264 PetscFunctionReturn(0);
10265}
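
A minimal usage sketch, assuming an assembled matrix A with its block size already set; it is assumed the returned IS is owned by the caller and must be destroyed:
.vb
  IS rows;
  ierr = MatFindOffBlockDiagonalEntries(A,&rows);CHKERRQ(ierr);
  ierr = ISView(rows,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); /* inspect the offending rows */
  ierr = ISDestroy(&rows);CHKERRQ(ierr);
.ve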
10266
10267/*@C
10268 MatInvertBlockDiagonal - Inverts the block diagonal entries.
10269
10270 Collective on Mat
10271
10272 Input Parameters:
10273. mat - the matrix
10274
10275 Output Parameters:
10276. values - the block inverses in column major order (FORTRAN-like)
10277
10278 Note:
10279 This routine is not available from Fortran.
10280
10281 Level: advanced
10282
10283.seealso: MatInvertBlockDiagonalMat()
10284@*/
10285PetscErrorCode MatInvertBlockDiagonal(Mat mat,const PetscScalar **values)
10286{
10287 PetscErrorCode ierr;
10288
10289 PetscFunctionBegin;
10290 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10291 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
10292 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
10293 if (!mat->ops->invertblockdiagonal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported");
10294 ierr = (*mat->ops->invertblockdiagonal)(mat,values);CHKERRQ(ierr);
10295 PetscFunctionReturn(0);
10296}
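
A sketch of reading the inverted blocks, assuming a square matrix A with a uniform block size; the values array is assumed to be owned by the matrix and is not freed by the caller:
.vb
  const PetscScalar *vals;
  PetscInt          bs;
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatInvertBlockDiagonal(A,&vals);CHKERRQ(ierr);
  /* vals[j*bs+i] is entry (i,j) of the first inverted diagonal block, column major */
.ve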
10297
10298/*@C
10299 MatInvertVariableBlockDiagonal - Inverts the block diagonal entries for blocks of possibly different sizes.
10300
10301 Collective on Mat
10302
10303 Input Parameters:
10304+ mat - the matrix
10305. nblocks - the number of blocks
10306- bsizes - the size of each block
10307
10308 Output Parameters:
10309. values - the block inverses in column major order (FORTRAN-like)
10310
10311 Note:
10312 This routine is not available from Fortran.
10313
10314 Level: advanced
10315
10316.seealso: MatInvertBlockDiagonal()
10317@*/
10318PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat,PetscInt nblocks,const PetscInt *bsizes,PetscScalar *values)
10319{
10320 PetscErrorCode ierr;
10321
10322 PetscFunctionBegin;
10323 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10324 if (!mat->assembled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
10325 if (mat->factortype) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
10326 if (!mat->ops->invertvariableblockdiagonal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported");
10327 ierr = (*mat->ops->invertvariableblockdiagonal)(mat,nblocks,bsizes,values);CHKERRQ(ierr);
10328 PetscFunctionReturn(0);
10329}
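
A sketch for the variable-block case, assuming two local diagonal blocks of sizes 2 and 3 and that the caller supplies the output array, sized as the sum of the squared block sizes:
.vb
  PetscInt    bsizes[2] = {2,3};
  PetscScalar values[2*2+3*3];
  ierr = MatInvertVariableBlockDiagonal(A,2,bsizes,values);CHKERRQ(ierr);
  /* values holds the 2x2 inverse followed by the 3x3 inverse, each column major */
.ve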
10330
10331/*@
10332 MatInvertBlockDiagonalMat - sets matrix C to the inverted block diagonal of matrix A
10333
10334 Collective on Mat
10335
10336 Input Parameter:
10337. A - the matrix
10338
10339 Output Parameter:
10340. C - matrix with the inverted block diagonal of A. C must already be created; it may have its type set.
10341
10342 Notes: the block size of A is used to determine the blocks on the diagonal of C
10343
10344 Level: advanced
10345
10346.seealso: MatInvertBlockDiagonal()
10347@*/
10348PetscErrorCode MatInvertBlockDiagonalMat(Mat A,Mat C)
10349{
10350 PetscErrorCode ierr;
10351 const PetscScalar *vals;
10352 PetscInt *dnnz;
10353 PetscInt M,N,m,n,rstart,rend,bs,i,j;
10354
10355 PetscFunctionBegin;
10356 ierr = MatInvertBlockDiagonal(A,&vals);CHKERRQ(ierr);
10357 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
10358 ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
10359 ierr = MatGetLocalSize(A,&m,&n);CHKERRQ(ierr);
10360 ierr = MatSetSizes(C,m,n,M,N);CHKERRQ(ierr);
10361 ierr = MatSetBlockSize(C,bs);CHKERRQ(ierr);
10362 ierr = PetscMalloc1(m/bs,&dnnz);CHKERRQ(ierr);
10363 for (j = 0; j < m/bs; j++) dnnz[j] = 1;
10364 ierr = MatXAIJSetPreallocation(C,bs,dnnz,NULL,NULL,NULL);CHKERRQ(ierr);
10365 ierr = PetscFree(dnnz);CHKERRQ(ierr);
10366 ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr);
10367 ierr = MatSetOption(C,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
10368 for (i = rstart/bs; i < rend/bs; i++) {
10369   ierr = MatSetValuesBlocked(C,1,&i,1,&i,&vals[(i-rstart/bs)*bs*bs],INSERT_VALUES);CHKERRQ(ierr);
10370 }
10371 ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
10372 ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
10373 ierr = MatSetOption(C,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
10374 PetscFunctionReturn(0);
10375}
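
A usage sketch; C is created by the caller as required above, and the MATAIJ type here is only illustrative:
.vb
  Mat C;
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&C);CHKERRQ(ierr);
  ierr = MatSetType(C,MATAIJ);CHKERRQ(ierr);
  ierr = MatInvertBlockDiagonalMat(A,C);CHKERRQ(ierr);
  /* ... use C, e.g. as a block Jacobi style approximate inverse ... */
  ierr = MatDestroy(&C);CHKERRQ(ierr);
.ve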
10376
10377/*@C
10378 MatTransposeColoringDestroy - Destroys a coloring context for matrix product C=A*B^T that was created
10379 via MatTransposeColoringCreate().
10380
10381 Collective on MatTransposeColoring
10382
10383 Input Parameter:
10384. c - coloring context
10385
10386 Level: intermediate
10387
10388.seealso: MatTransposeColoringCreate()
10389@*/
10390PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10391{
10392 PetscErrorCode ierr;
10393 MatTransposeColoring matcolor=*c;
10394
10395 PetscFunctionBegin;
10396 if (!matcolor) PetscFunctionReturn(0);
10397 if (--((PetscObject)matcolor)->refct > 0) {matcolor = 0; PetscFunctionReturn(0);}
10398
10399 ierr = PetscFree3(matcolor->ncolumns,matcolor->nrows,matcolor->colorforrow);CHKERRQ(ierr);
10400 ierr = PetscFree(matcolor->rows);CHKERRQ(ierr);
10401 ierr = PetscFree(matcolor->den2sp);CHKERRQ(ierr);
10402 ierr = PetscFree(matcolor->colorforcol);CHKERRQ(ierr);
10403 ierr = PetscFree(matcolor->columns);CHKERRQ(ierr);
10404 if (matcolor->brows>0) {
10405   ierr = PetscFree(matcolor->lstart);CHKERRQ(ierr);
10406 }
10407 ierr = PetscHeaderDestroy(c);CHKERRQ(ierr);
10408 PetscFunctionReturn(0);
10409}
10410
10411/*@C
10412 MatTransColoringApplySpToDen - Given a symbolic matrix product C=A*B^T for which
10413 a MatTransposeColoring context has been created, computes a dense B^T by applying
10414 the MatTransposeColoring to the sparse B.
10415
10416 Collective on MatTransposeColoring
10417
10418 Input Parameters:
10419+ coloring - coloring context created with MatTransposeColoringCreate()
10420. B - sparse matrix B
10421- Btdense - symbolic dense matrix B^T
10422
10423 Output Parameter:
10424. Btdense - dense matrix B^T
10425
10426 Level: advanced
10427
10428 Notes:
10429 These are used internally for some implementations of MatRARt()
10430
10431.seealso: MatTransposeColoringCreate(), MatTransposeColoringDestroy(), MatTransColoringApplyDenToSp()
10432
10433@*/
10434PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring,Mat B,Mat Btdense)
10435{
10436 PetscErrorCode ierr;
10437
10438 PetscFunctionBegin;
10439 PetscValidHeaderSpecific(B,MAT_CLASSID,1);
10440 PetscValidHeaderSpecific(Btdense,MAT_CLASSID,2);
10441 PetscValidHeaderSpecific(coloring,MAT_TRANSPOSECOLORING_CLASSID,3);
10442
10443 if (!B->ops->transcoloringapplysptoden) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported for this matrix type %s",((PetscObject)B)->type_name);
10444 ierr = (B->ops->transcoloringapplysptoden)(coloring,B,Btdense);CHKERRQ(ierr);
10445 PetscFunctionReturn(0);
10446}
10447
10448/*@C
10449 MatTransColoringApplyDenToSp - Given a symbolic matrix product Csp=A*B^T for which
10450 a MatTransposeColoring context has been created and a dense matrix Cden=A*Btdense
10451 in which Btdense is obtained from MatTransColoringApplySpToDen(), recovers the sparse
10452 matrix Csp from Cden.
10453
10454 Collective on MatTransposeColoring
10455
10456 Input Parameters:
10457+ coloring - coloring context created with MatTransposeColoringCreate()
10458- Cden - matrix product of a sparse matrix and a dense matrix Btdense
10459
10460 Output Parameter:
10461. Csp - sparse matrix
10462
10463 Level: advanced
10464
10465 Notes:
10466 These are used internally for some implementations of MatRARt()
10467
10468.seealso: MatTransposeColoringCreate(), MatTransposeColoringDestroy(), MatTransColoringApplySpToDen()
10469
10470@*/
10471PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring,Mat Cden,Mat Csp)
10472{
10473 PetscErrorCode ierr;
10474
10475 PetscFunctionBegin;
10476 PetscValidHeaderSpecific(matcoloring,MAT_TRANSPOSECOLORING_CLASSID,1);
10477 PetscValidHeaderSpecific(Cden,MAT_CLASSID,2);
10478 PetscValidHeaderSpecific(Csp,MAT_CLASSID,3);
10479
10480 if (!Csp->ops->transcoloringapplydentosp) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported for this matrix type %s",((PetscObject)Csp)->type_name);
10481 ierr = (Csp->ops->transcoloringapplydentosp)(matcoloring,Cden,Csp);CHKERRQ(ierr);
10482 PetscFunctionReturn(0);
10483}
10484
10485/*@C
10486 MatTransposeColoringCreate - Creates a matrix coloring context for matrix product C=A*B^T.
10487
10488 Collective on Mat
10489
10490 Input Parameters:
10491+ mat - the matrix product C
10492- iscoloring - the coloring of the matrix; usually obtained with MatColoringCreate() or DMCreateColoring()
10493
10494 Output Parameter:
10495. color - the new coloring context
10496
10497 Level: intermediate
10498
10499.seealso: MatTransposeColoringDestroy(), MatTransColoringApplySpToDen(),
10500 MatTransColoringApplyDenToSp()
10501@*/
10502PetscErrorCode MatTransposeColoringCreate(Mat mat,ISColoring iscoloring,MatTransposeColoring *color)
10503{
10504 MatTransposeColoring c;
10505 MPI_Comm comm;
10506 PetscErrorCode ierr;
10507
10508 PetscFunctionBegin;
10509 ierr = PetscLogEventBegin(MAT_TransposeColoringCreate,mat,0,0,0);CHKERRQ(ierr);
10510 ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
10511 ierr = PetscHeaderCreate(c,MAT_TRANSPOSECOLORING_CLASSID,"MatTransposeColoring","Matrix product C=A*B^T via coloring","Mat",comm,MatTransposeColoringDestroy,NULL);CHKERRQ(ierr);
10512
10513 c->ctype = iscoloring->ctype;
10514 if (mat->ops->transposecoloringcreate) {
10515   ierr = (*mat->ops->transposecoloringcreate)(mat,iscoloring,c);CHKERRQ(ierr);
10516 } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Code not yet written for this matrix type");
10517
10518 *color = c;
10519 ierr = PetscLogEventEnd(MAT_TransposeColoringCreate,mat,0,0,0);CHKERRQ(ierr);
10520 PetscFunctionReturn(0);
10521}
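
The coloring routines above are used together; a sketch of the MatRARt()-style workflow, where the creation of the dense work matrices Btdense and Cden and of iscoloring is elided:
.vb
  MatTransposeColoring color;
  ierr = MatTransposeColoringCreate(C,iscoloring,&color);CHKERRQ(ierr);
  ierr = MatTransColoringApplySpToDen(color,B,Btdense);CHKERRQ(ierr); /* compress sparse B into dense B^T */
  /* ... compute Cden = A*Btdense with a dense product ... */
  ierr = MatTransColoringApplyDenToSp(color,Cden,Csp);CHKERRQ(ierr);  /* scatter the dense result back into sparse Csp */
  ierr = MatTransposeColoringDestroy(&color);CHKERRQ(ierr);
.ve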
10522
10523/*@
10524 MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If no
10525 new nonzero locations have been added to the matrix since the previous call then the value will be the same;
10526 otherwise it will be larger.
10527
10528 Not Collective
10529
10530 Input Parameter:
10531. A - the matrix
10532
10533 Output Parameter:
10534. state - the current state
10535
10536 Notes:
10537 You can only compare states from two different calls on the SAME matrix; you cannot compare calls made on
10538 different matrices.
10539
10540 Level: intermediate
10541
10542@*/
10543PetscErrorCode MatGetNonzeroState(Mat mat,PetscObjectState *state)
10544{
10545 PetscFunctionBegin;
10546 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10547 *state = mat->nonzerostate;
10548 PetscFunctionReturn(0);
10549}
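
A sketch of the intended use: cache the state and compare it against a later call on the same matrix to detect new nonzero locations:
.vb
  PetscObjectState state0,state1;
  ierr = MatGetNonzeroState(A,&state0);CHKERRQ(ierr);
  /* ... assembly that may introduce new nonzero locations ... */
  ierr = MatGetNonzeroState(A,&state1);CHKERRQ(ierr);
  if (state1 > state0) {
    /* the nonzero pattern changed, e.g. refresh a cached symbolic factorization */
  }
.ve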
10550
10551/*@
10552 MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
10553 matrices from each processor
10554
10555 Collective
10556
10557 Input Parameters:
10558+ comm - the communicator the parallel matrix will live on
10559. seqmat - the input sequential matrix
10560. n - number of local columns (or PETSC_DECIDE)
10561- reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
10562
10563 Output Parameter:
10564. mpimat - the parallel matrix generated
10565
10566 Level: advanced
10567
10568 Notes:
10569 The number of columns of the matrix in EACH processor MUST be the same.
10570
10571@*/
10572PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm,Mat seqmat,PetscInt n,MatReuse reuse,Mat *mpimat)
10573{
10574 PetscErrorCode ierr;
10575
10576 PetscFunctionBegin;
10577 if (!seqmat->ops->creatempimatconcatenateseqmat) SETERRQ1(PetscObjectComm((PetscObject)seqmat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)seqmat)->type_name);
10578 if (reuse == MAT_REUSE_MATRIX && seqmat == *mpimat) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10579
10580 ierr = PetscLogEventBegin(MAT_Merge,seqmat,0,0,0);CHKERRQ(ierr);
10581 ierr = (*seqmat->ops->creatempimatconcatenateseqmat)(comm,seqmat,n,reuse,mpimat);CHKERRQ(ierr);
10582 ierr = PetscLogEventEnd(MAT_Merge,seqmat,0,0,0);CHKERRQ(ierr);
10583 PetscFunctionReturn(0);
10584}
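
A usage sketch, assuming seqmat is a sequential matrix on each rank, all with the same number of columns as required above:
.vb
  Mat mpimat;
  ierr = MatCreateMPIMatConcatenateSeqMat(PETSC_COMM_WORLD,seqmat,PETSC_DECIDE,MAT_INITIAL_MATRIX,&mpimat);CHKERRQ(ierr);
  /* later, with unchanged layout, refill the same parallel matrix */
  ierr = MatCreateMPIMatConcatenateSeqMat(PETSC_COMM_WORLD,seqmat,PETSC_DECIDE,MAT_REUSE_MATRIX,&mpimat);CHKERRQ(ierr);
.ve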
10585
10586/*@
10587 MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent
10588 ranks' ownership ranges.
10589
10590 Collective on A
10591
10592 Input Parameters:
10593+ A - the matrix to create subdomains from
10594- N - requested number of subdomains
10595
10596
10597 Output Parameters:
10598+ n - number of subdomains resulting on this rank
10599- iss - IS list with indices of subdomains on this rank
10600
10601 Level: advanced
10602
10603 Notes:
10604 The number of subdomains must be smaller than the communicator size.
10605@*/
10606PetscErrorCode MatSubdomainsCreateCoalesce(Mat A,PetscInt N,PetscInt *n,IS *iss[])
10607{
10608 MPI_Comm comm,subcomm;
10609 PetscMPIInt size,rank,color;
10610 PetscInt rstart,rend,k;
10611 PetscErrorCode ierr;
10612
10613 PetscFunctionBegin;
10614 ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
10615 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
10616 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
10617 if (N < 1 || N >= (PetscInt)size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subdomains must be > 0 and < %D, got N = %D",size,N);
10618 *n = 1;
10619 k = ((PetscInt)size)/N + ((PetscInt)size%N>0); /* There are up to k ranks to a color */
10620 color = rank/k;
10621 ierr = MPI_Comm_split(comm,color,rank,&subcomm);CHKERRQ(ierr);
10622 ierr = PetscMalloc1(1,iss);CHKERRQ(ierr);
10623 ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
10624 ierr = ISCreateStride(subcomm,rend-rstart,rstart,1,iss[0]);CHKERRQ(ierr);
10625 ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr);
10626 PetscFunctionReturn(0);
10627}
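
A sketch requesting two subdomains; it is assumed the caller destroys the returned index sets and frees the list:
.vb
  PetscInt n,k;
  IS       *iss;
  ierr = MatSubdomainsCreateCoalesce(A,2,&n,&iss);CHKERRQ(ierr);
  /* ... use the n local subdomain index sets ... */
  for (k=0; k<n; k++) {ierr = ISDestroy(&iss[k]);CHKERRQ(ierr);}
  ierr = PetscFree(iss);CHKERRQ(ierr);
.ve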
10628
10629/*@
10630 MatGalerkin - Constructs the coarse grid problem via Galerkin projection.
10631
10632 If the interpolation and restriction operators are the same, uses MatPtAP().
10633 If they are not the same, uses MatMatMatMult().
10634
10635 Once the coarse grid problem is constructed, corrects for interpolation operators
10636 that are not of full rank, which can legitimately happen in the case of non-nested
10637 geometric multigrid.
10638
10639 Input Parameters:
10640+ restrct - restriction operator
10641. dA - fine grid matrix
10642. interpolate - interpolation operator
10643. reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
10644- fill - expected fill, use PETSC_DEFAULT if you do not have a good estimate
10645
10646 Output Parameters:
10647. A - the Galerkin coarse matrix
10648
10649 Options Database Key:
10650. -pc_mg_galerkin <both,pmat,mat,none>
10651
10652 Level: developer
10653
10654.seealso: MatPtAP(), MatMatMatMult()
10655@*/
10656PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
10657{
10658 PetscErrorCode ierr;
10659 IS zerorows;
10660 Vec diag;
10661
10662 PetscFunctionBegin;
10663 if (reuse == MAT_INPLACE_MATRIX) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported");
10664 /* Construct the coarse grid matrix */
10665 if (interpolate == restrct) {
10666   ierr = MatPtAP(dA,interpolate,reuse,fill,A);CHKERRQ(ierr);
10667 } else {
10668   ierr = MatMatMatMult(restrct,dA,interpolate,reuse,fill,A);CHKERRQ(ierr);
10669 }
10670
10671 /* If the interpolation matrix is not of full rank, A will have zero rows.
10672    This can legitimately happen in the case of non-nested geometric multigrid.
10673    In that event, we set the rows of the matrix to the rows of the identity,
10674    ignoring the equations (as the RHS will also be zero). */
10675
10676 ierr = MatFindZeroRows(*A, &zerorows);CHKERRQ(ierr);
10677
10678 if (zerorows != NULL) { /* if there are any zero rows */
10679   ierr = MatCreateVecs(*A, &diag, NULL);CHKERRQ(ierr);
10680   ierr = MatGetDiagonal(*A, diag);CHKERRQ(ierr);
10681   ierr = VecISSet(diag, zerorows, 1.0);CHKERRQ(ierr);
10682   ierr = MatDiagonalSet(*A, diag, INSERT_VALUES);CHKERRQ(ierr);
10683   ierr = VecDestroy(&diag);CHKERRQ(ierr);
10684   ierr = ISDestroy(&zerorows);CHKERRQ(ierr);
10685 }
10686 PetscFunctionReturn(0);
10687}
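
A sketch of forming a coarse operator from a fine operator dA with the same matrix P used for restriction and interpolation, so the MatPtAP() path above is taken:
.vb
  Mat Ac;
  ierr = MatGalerkin(P,dA,P,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&Ac);CHKERRQ(ierr);
  /* on later rebuilds with an unchanged sparsity pattern */
  ierr = MatGalerkin(P,dA,P,MAT_REUSE_MATRIX,PETSC_DEFAULT,&Ac);CHKERRQ(ierr);
.ve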
10688
10689/*@C
10690 MatSetOperation - Allows user to set a matrix operation for any matrix type
10691
10692 Logically Collective on Mat
10693
10694 Input Parameters:
10695+ mat - the matrix
10696. op - the name of the operation
10697- f - the function that provides the operation
10698
10699 Level: developer
10700
10701 Usage:
10702$ extern PetscErrorCode usermult(Mat,Vec,Vec);
10703$ ierr = MatCreateXXX(comm,...&A);
10704$ ierr = MatSetOperation(A,MATOP_MULT,(void(*)(void))usermult);
10705
10706 Notes:
10707 See the file include/petscmat.h for a complete list of matrix
10708 operations, which all have the form MATOP_<OPERATION>, where
10709 <OPERATION> is the name (in all capital letters) of the
10710 user interface routine (e.g., MatMult() -> MATOP_MULT).
10711
10712 All user-provided functions (except for MATOP_DESTROY) should have the same calling
10713 sequence as the usual matrix interface routines, since they
10714 are intended to be accessed via the usual matrix interface
10715 routines, e.g.,
10716$ MatMult(Mat,Vec,Vec) -> usermult(Mat,Vec,Vec)
10717
10718 In particular each function MUST return an error code of 0 on success and
10719 nonzero on failure.
10720
10721 This routine is distinct from MatShellSetOperation() in that it can be called on any matrix type.
10722
10723.seealso: MatGetOperation(), MatCreateShell(), MatShellSetContext(), MatShellSetOperation()
10724@*/
10725PetscErrorCode MatSetOperation(Mat mat,MatOperation op,void (*f)(void))
10726{
10727 PetscFunctionBegin;
10728 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10729 if (op == MATOP_VIEW && !mat->ops->viewnative && f != (void (*)(void))(mat->ops->view)) {
10730   mat->ops->viewnative = mat->ops->view;
10731 }
10732 (((void(**)(void))mat->ops)[op]) = f;
10733 PetscFunctionReturn(0);
10734}
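
Completing the Usage fragment above, a sketch that overrides MatMult() on an existing matrix A; usermult must follow the MatMult() calling sequence and return 0 on success:
.vb
  extern PetscErrorCode usermult(Mat,Vec,Vec);
  ierr = MatSetOperation(A,MATOP_MULT,(void (*)(void))usermult);CHKERRQ(ierr);
  /* subsequent MatMult(A,x,y) calls now dispatch to usermult(A,x,y) */
.ve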
10735
10736/*@C
10737 MatGetOperation - Gets a matrix operation for any matrix type.
10738
10739 Not Collective
10740
10741 Input Parameters:
10742+ mat - the matrix
10743- op - the name of the operation
10744
10745 Output Parameter:
10746. f - the function that provides the operation
10747
10748 Level: developer
10749
10750 Usage:
10751$ PetscErrorCode (*usermult)(Mat,Vec,Vec);
10752$ ierr = MatGetOperation(A,MATOP_MULT,(void(**)(void))&usermult);
10753
10754 Notes:
10755 See the file include/petscmat.h for a complete list of matrix
10756 operations, which all have the form MATOP_<OPERATION>, where
10757 <OPERATION> is the name (in all capital letters) of the
10758 user interface routine (e.g., MatMult() -> MATOP_MULT).
10759
10760 This routine is distinct from MatShellGetOperation() in that it can be called on any matrix type.
10761
10762.seealso: MatSetOperation(), MatCreateShell(), MatShellGetContext(), MatShellGetOperation()
10763@*/
10764PetscErrorCode MatGetOperation(Mat mat,MatOperation op,void(**f)(void))
10765{
10766 PetscFunctionBegin;
10767 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10768 *f = (((void (**)(void))mat->ops)[op]);
10769 PetscFunctionReturn(0);
10770}
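
Together with MatSetOperation(), this permits wrapping the implementation a matrix type already provides. A sketch (assuming A is a Mat whose type is already set; savedmult and MyMult are hypothetical names, and the wrapper simply post-scales the product):

    static PetscErrorCode (*savedmult)(Mat,Vec,Vec);

    static PetscErrorCode MyMult(Mat A,Vec x,Vec y)
    {
      PetscErrorCode ierr;

      PetscFunctionBegin;
      ierr = (*savedmult)(A,x,y);CHKERRQ(ierr);   /* delegate to the saved implementation */
      ierr = VecScale(y,2.0);CHKERRQ(ierr);       /* then post-process the result */
      PetscFunctionReturn(0);
    }

    ierr = MatGetOperation(A,MATOP_MULT,(void(**)(void))&savedmult);CHKERRQ(ierr);
    ierr = MatSetOperation(A,MATOP_MULT,(void(*)(void))MyMult);CHKERRQ(ierr);
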
10771
10772/*@
10773 MatHasOperation - Determines whether the given matrix supports the particular
10774 operation.
10775
10776 Not Collective
10777
10778 Input Parameters:
10779+ mat - the matrix
10780- op - the operation, for example, MATOP_GET_DIAGONAL
10781
10782 Output Parameter:
10783. has - either PETSC_TRUE or PETSC_FALSE
10784
10785 Level: advanced
10786
10787 Notes:
10788 See the file include/petscmat.h for a complete list of matrix
10789 operations, which all have the form MATOP_<OPERATION>, where
10790 <OPERATION> is the name (in all capital letters) of the
10791 user-level routine. E.g., MatNorm() -> MATOP_NORM.
10792
10793.seealso: MatCreateShell()
10794@*/
10795PetscErrorCode MatHasOperation(Mat mat,MatOperation op,PetscBool *has)
10796{
10797 PetscErrorCode ierr;
10798
10799 PetscFunctionBegin;
10800 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10801 PetscValidType(mat,1);
10802 PetscValidPointer(has,3);
10803 if (mat->ops->hasoperation) {
10804 ierr = (*mat->ops->hasoperation)(mat,op,has);CHKERRQ(ierr);
10805 } else {
10806 if (((void**)mat->ops)[op]) *has = PETSC_TRUE;
10807 else {
10808 *has = PETSC_FALSE;
10809 if (op == MATOP_CREATE_SUBMATRIX) {
10810 PetscMPIInt size;
10811
10812 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size);CHKERRQ(ierr);
10813 if (size == 1) {
10814 ierr = MatHasOperation(mat,MATOP_CREATE_SUBMATRICES,has);CHKERRQ(ierr);
10815 }
10816 }
10817 }
10818 }
10819 PetscFunctionReturn(0);
10820}
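
The fallback above means a single-process query for MATOP_CREATE_SUBMATRIX succeeds whenever MATOP_CREATE_SUBMATRICES is available. A sketch of the intended pattern (assuming A is an assembled Mat and d a conforming Vec): probe before relying on an optional operation.

    PetscBool has;

    ierr = MatHasOperation(A,MATOP_GET_DIAGONAL,&has);CHKERRQ(ierr);
    if (has) {
      ierr = MatGetDiagonal(A,d);CHKERRQ(ierr);
    } else {
      /* fall back to an algorithm that does not need the diagonal */
    }
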
10821
10822/*@
10823 MatHasCongruentLayouts - Determines whether the row and column layouts
10824 of the matrix are congruent, that is, whether they describe the same parallel distribution
10825
10826 Collective on mat
10827
10828 Input Parameter:
10829. mat - the matrix
10830
10831 Output Parameter:
10832. cong - either PETSC_TRUE or PETSC_FALSE
10833
10834 Level: beginner
10835
10836
10837
10838.seealso: MatCreate(), MatSetSizes()
10839@*/
10840PetscErrorCode MatHasCongruentLayouts(Mat mat,PetscBool *cong)
10841{
10842 PetscErrorCode ierr;
10843
10844 PetscFunctionBegin;
10845 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10846 PetscValidType(mat,1);
10847 PetscValidPointer(cong,2);
10848 if (!mat->rmap || !mat->cmap) {
10849 *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
10850 PetscFunctionReturn(0);
10851 }
10852 if (mat->congruentlayouts == PETSC_DECIDE-1) { /* first time we compare rows and cols layouts */
10853 ierr = PetscLayoutCompare(mat->rmap,mat->cmap,cong);CHKERRQ(ierr);
10854 if (*cong) mat->congruentlayouts = 1;
10855 else mat->congruentlayouts = 0;
10856 } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
10857 PetscFunctionReturn(0);
10858}
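
The comparison runs once via PetscLayoutCompare() and is cached in mat->congruentlayouts, so repeated queries are cheap. A usage sketch (assuming A is a Mat created with matching local row and column sizes):

    PetscBool cong;

    ierr = MatHasCongruentLayouts(A,&cong);CHKERRQ(ierr);
    if (cong) {
      /* the input and output vectors of MatMult(A,x,y) share one parallel
         layout, so a single template vector suffices for both */
    }
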
10859
10860/*@
10861 MatFreeIntermediateDataStructures - Frees intermediate data structures created for reuse,
10862 e.g., the matrix product data cached by MatPtAP().
10863
10864 Collective on mat
10865
10866 Input Parameter:
10867. mat - the matrix
10868
10869 Output Parameter:
10870. mat - the matrix with intermediate data structures released
10871
10872 Level: advanced
10873
10874
10875
10876.seealso: MatPtAP(), MatMatMult()
10877@*/
10878PetscErrorCode MatFreeIntermediateDataStructures(Mat mat)
10879{
10880 PetscErrorCode ierr;
10881
10882 PetscFunctionBegin;
10883 PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10884 PetscValidType(mat,1);
10885 if (mat->ops->freeintermediatedatastructures) {
10886 ierr = (*mat->ops->freeintermediatedatastructures)(mat);CHKERRQ(ierr);
10887 }
10888 PetscFunctionReturn(0);
10889}
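
A sketch of when this call pays off (assuming C was produced by MatPtAP() and A's values, but not its nonzero pattern, change between steps; i and nsteps are hypothetical loop bookkeeping). Once no further reuse is planned, the cached symbolic data can be released:

    ierr = MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C);CHKERRQ(ierr);   /* builds and caches symbolic data */
    for (i=0; i<nsteps; i++) {
      /* ... update the numerical values of A ... */
      ierr = MatPtAP(A,P,MAT_REUSE_MATRIX,2.0,&C);CHKERRQ(ierr);   /* reuses the cached data */
    }
    ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);     /* done reusing; release it */
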