Bug Summary

File: mat/impls/aij/mpi/mpimatmatmult.c
Warning: line 643, column 13
Array access (from variable 'pi_oth') results in a null pointer dereference
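
The reported path runs through the single-process branch of the symbolic routine below, where the CSR row-pointer array pi_oth is set to NULL and later indexed. A minimal sketch of the flagged pattern (condensed from the annotated code; names as in the source):

    PetscInt *pi_oth = NULL;             /* single-process branch leaves it NULL */
    if (size > 1) pi_oth = p_oth->i;     /* assigned only with multiple ranks    */
    ...
    pnz = pi_oth[row+1] - pi_oth[row];   /* analyzer: possible NULL dereference  */

In practice the off-diagonal block of an MPIAIJ matrix holds no nonzeros on a single process, so the loop enclosing the dereference executes zero times on the NULL path; the analyzer cannot prove that invariant, which is what this report reflects.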

Annotated Source Code


  1
  2  /*
  3    Defines matrix-matrix product routines for pairs of MPIAIJ matrices
  4            C = A * B
  5  */
  6  #include <../src/mat/impls/aij/seq/aij.h> /*I "petscmat.h" I*/
  7  #include <../src/mat/utils/freespace.h>
  8  #include <../src/mat/impls/aij/mpi/mpiaij.h>
  9  #include <petscbt.h>
 10  #include <../src/mat/impls/dense/mpi/mpidense.h>
 11  #include <petsc/private/vecimpl.h>
 12  #include <petsc/private/vecscatterimpl.h>
 13
 14  #if defined(PETSC_HAVE_HYPRE)
 15  PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat,Mat,PetscReal,Mat*);
 16  #endif
 17
 18  PETSC_INTERN PetscErrorCode MatMatMult_MPIAIJ_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
 19  {
 20    PetscErrorCode ierr;
 21  #if defined(PETSC_HAVE_HYPRE)
 22    const char     *algTypes[4] = {"scalable","nonscalable","seqmpi","hypre"};
 23    PetscInt       nalg = 4;
 24  #else
 25    const char     *algTypes[3] = {"scalable","nonscalable","seqmpi"};
 26    PetscInt       nalg = 3;
 27  #endif
 28    PetscInt       alg = 1; /* set nonscalable algorithm as default */
 29    MPI_Comm       comm;
 30    PetscBool      flg;
 31
 32    PetscFunctionBegin;
 33    if (scall == MAT_INITIAL_MATRIX) {
 34      ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
 35      if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend) SETERRQ4(comm,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%D, %D) != (%D,%D)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend);
 36
 37      ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatMatMult","Mat");CHKERRQ(ierr);
 38      ierr = PetscOptionsEList("-matmatmult_via","Algorithmic approach","MatMatMult",algTypes,nalg,algTypes[1],&alg,&flg);CHKERRQ(ierr);
 39      ierr = PetscOptionsEnd();CHKERRQ(ierr);
 40
 41      if (!flg && B->cmap->N > 100000) { /* may switch to scalable algorithm as default */
 42        MatInfo   Ainfo,Binfo;
 43        PetscInt  nz_local;
 44        PetscBool alg_scalable_loc=PETSC_FALSE,alg_scalable;
 45
 46        ierr = MatGetInfo(A,MAT_LOCAL,&Ainfo);CHKERRQ(ierr);
 47        ierr = MatGetInfo(B,MAT_LOCAL,&Binfo);CHKERRQ(ierr);
 48        nz_local = (PetscInt)(Ainfo.nz_allocated + Binfo.nz_allocated);
 49
 50        if (B->cmap->N > fill*nz_local) alg_scalable_loc = PETSC_TRUE;
 51        ierr = MPIU_Allreduce(&alg_scalable_loc,&alg_scalable,1,MPIU_BOOL,MPI_LOR,comm);CHKERRQ(ierr);
 52
 53        if (alg_scalable) {
 54          alg  = 0; /* the scalable algorithm is roughly 50% slower than the nonscalable one */
 55          ierr = PetscInfo2(B,"Use scalable algorithm, BN %D, fill*nz_allocated %g\n",B->cmap->N,fill*nz_local);CHKERRQ(ierr);
 56        }
 57      }
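    /* annotation (added): with the default fill of 2.0 and, say, nz_local = 40000
       allocated nonzeros on a rank, any B with more than fill*nz_local = 80000
       global columns sets alg_scalable_loc; the MPI_LOR reduction then switches
       every rank to the scalable algorithm if any single rank votes for it. */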
 58
 59      ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
 60      switch (alg) {
 61      case 1:
 62        ierr = MatMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(A,B,fill,C);CHKERRQ(ierr);
 63        break;
 64      case 2:
 65        ierr = MatMatMultSymbolic_MPIAIJ_MPIAIJ_seqMPI(A,B,fill,C);CHKERRQ(ierr);
 66        break;
 67  #if defined(PETSC_HAVE_HYPRE)
 68      case 3:
 69        ierr = MatMatMultSymbolic_AIJ_AIJ_wHYPRE(A,B,fill,C);CHKERRQ(ierr);
 70        break;
 71  #endif
 72      default:
 73        ierr = MatMatMultSymbolic_MPIAIJ_MPIAIJ(A,B,fill,C);CHKERRQ(ierr);
 74        break;
 75      }
 76      ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
 77
 78      if (alg == 0 || alg == 1) {
 79        Mat_MPIAIJ *c  = (Mat_MPIAIJ*)(*C)->data;
 80        Mat_APMPI  *ap = c->ap;
 81        ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)(*C)),((PetscObject)(*C))->prefix,"MatFreeIntermediateDataStructures","Mat");CHKERRQ(ierr);
 82        ap->freestruct = PETSC_FALSE;
 83        ierr = PetscOptionsBool("-mat_freeintermediatedatastructures","Free intermediate data structures","MatFreeIntermediateDataStructures",ap->freestruct,&ap->freestruct,NULL);CHKERRQ(ierr);
 84        ierr = PetscOptionsEnd();CHKERRQ(ierr);
 85      }
 86    }
 87
 88    ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
 89    ierr = (*(*C)->ops->matmultnumeric)(A,B,*C);CHKERRQ(ierr);
 90    ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
 91    PetscFunctionReturn(0);
 92  }
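
For context, a minimal sketch of how this routine is reached from user code (error checking abbreviated; the -matmatmult_via values are the ones registered above):

    Mat C;
    ierr = MatMatMult(A,B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);CHKERRQ(ierr); /* symbolic + numeric */
    ierr = MatMatMult(A,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&C);CHKERRQ(ierr);   /* reuses symbolic data */
    ierr = MatDestroy(&C);CHKERRQ(ierr);

Run with -matmatmult_via scalable (or nonscalable, seqmpi, hypre when available) to force a particular symbolic algorithm.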
 93
 94  PetscErrorCode MatDestroy_MPIAIJ_MatMatMult(Mat A)
 95  {
 96    PetscErrorCode ierr;
 97    Mat_MPIAIJ     *a    = (Mat_MPIAIJ*)A->data;
 98    Mat_APMPI      *ptap = a->ap;
 99
100    PetscFunctionBegin;
101    ierr = PetscFree2(ptap->startsj_s,ptap->startsj_r);CHKERRQ(ierr);
102    ierr = PetscFree(ptap->bufa);CHKERRQ(ierr);
103    ierr = MatDestroy(&ptap->P_loc);CHKERRQ(ierr);
104    ierr = MatDestroy(&ptap->P_oth);CHKERRQ(ierr);
105    ierr = MatDestroy(&ptap->Pt);CHKERRQ(ierr);
106    ierr = PetscFree(ptap->api);CHKERRQ(ierr);
107    ierr = PetscFree(ptap->apj);CHKERRQ(ierr);
108    ierr = PetscFree(ptap->apa);CHKERRQ(ierr);
109    ierr = ptap->destroy(A);CHKERRQ(ierr);
110    ierr = PetscFree(ptap);CHKERRQ(ierr);
111    PetscFunctionReturn(0);
112  }
113
114  PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat A,Mat P,Mat C)
115  {
116    PetscErrorCode ierr;
117    Mat_MPIAIJ     *a=(Mat_MPIAIJ*)A->data,*c=(Mat_MPIAIJ*)C->data;
118    Mat_SeqAIJ     *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
119    Mat_SeqAIJ     *cd=(Mat_SeqAIJ*)(c->A)->data,*co=(Mat_SeqAIJ*)(c->B)->data;
120    PetscScalar    *cda=cd->a,*coa=co->a;
121    Mat_SeqAIJ     *p_loc,*p_oth;
122    PetscScalar    *apa,*ca;
123    PetscInt       cm=C->rmap->n;
124    Mat_APMPI      *ptap=c->ap;
125    PetscInt       *api,*apj,*apJ,i,k;
126    PetscInt       cstart=C->cmap->rstart;
127    PetscInt       cdnz,conz,k0,k1;
128    MPI_Comm       comm;
129    PetscMPIInt    size;
130
131    PetscFunctionBegin;
132    ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
133    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
134
135    if (!ptap) SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"AP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
136
137    /* 1) get P_oth = ptap->P_oth and P_loc = ptap->P_loc */
138    /*-----------------------------------------------------*/
139    /* update numerical values of P_oth and P_loc */
140    ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
141    ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
142
143    /* 2) compute numeric C_loc = A_loc*P = Ad*P_loc + Ao*P_oth */
144    /*----------------------------------------------------------*/
145    /* get data from symbolic products */
146    p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
147    p_oth = NULL;
148    if (size > 1) {
149      p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
150    }
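    /* annotation (added): on one process p_oth stays NULL; this is safe only
       because the off-diagonal block ao then has no nonzeros, so the p_oth
       branch of AProw_nonscalable below never executes. */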
151
152    /* get apa for storing dense row A[i,:]*P */
153    apa = ptap->apa;
154
155    api = ptap->api;
156    apj = ptap->apj;
157    for (i=0; i<cm; i++) {
158      /* compute apa = A[i,:]*P */
159      AProw_nonscalable(i,ad,ao,p_loc,p_oth,apa);
160
161      /* set values in C */
162      apJ  = apj + api[i];
163      cdnz = cd->i[i+1] - cd->i[i];
164      conz = co->i[i+1] - co->i[i];
165
166      /* 1st off-diagonal part of C */
167      ca = coa + co->i[i];
168      k  = 0;
169      for (k0=0; k0<conz; k0++) {
170        if (apJ[k] >= cstart) break;
171        ca[k0]        = apa[apJ[k]];
172        apa[apJ[k++]] = 0.0;
173      }
174
175      /* diagonal part of C */
176      ca = cda + cd->i[i];
177      for (k1=0; k1<cdnz; k1++) {
178        ca[k1]        = apa[apJ[k]];
179        apa[apJ[k++]] = 0.0;
180      }
181
182      /* 2nd off-diagonal part of C */
183      ca = coa + co->i[i];
184      for (; k0<conz; k0++) {
185        ca[k0]        = apa[apJ[k]];
186        apa[apJ[k++]] = 0.0;
187      }
188    }
189    ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
190    ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
191
192    if (ptap->freestruct) {
193      ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
194    }
195    PetscFunctionReturn(0);
196  }
197
198  PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat A,Mat P,PetscReal fill,Mat *C)
199  {
200    PetscErrorCode     ierr;
201    MPI_Comm           comm;
202    PetscMPIInt        size;
203    Mat                Cmpi;
204    Mat_APMPI          *ptap;
205    PetscFreeSpaceList free_space=NULL,current_space=NULL;
206    Mat_MPIAIJ         *a=(Mat_MPIAIJ*)A->data,*c;
207    Mat_SeqAIJ         *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_loc,*p_oth;
208    PetscInt           *pi_loc,*pj_loc,*pi_oth,*pj_oth,*dnz,*onz;
209    PetscInt           *adi=ad->i,*adj=ad->j,*aoi=ao->i,*aoj=ao->j,rstart=A->rmap->rstart;
210    PetscInt           *lnk,i,pnz,row,*api,*apj,*Jptr,apnz,nspacedouble=0,j,nzi;
211    PetscInt           am=A->rmap->n,pN=P->cmap->N,pn=P->cmap->n,pm=P->rmap->n;
212    PetscBT            lnkbt;
213    PetscScalar        *apa;
214    PetscReal          afill;
215    MatType            mtype;
216
217    PetscFunctionBegin;
218    ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
219    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
220
221    /* create struct Mat_APMPI and attach it to C later */
222    ierr = PetscNew(&ptap);CHKERRQ(ierr);
223
224    /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
225    ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
226
227    /* get P_loc by taking all local rows of P */
228    ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
229
230    p_loc  = (Mat_SeqAIJ*)(ptap->P_loc)->data;
231    pi_loc = p_loc->i; pj_loc = p_loc->j;
232    if (size > 1) {
233      p_oth  = (Mat_SeqAIJ*)(ptap->P_oth)->data;
234      pi_oth = p_oth->i; pj_oth = p_oth->j;
235    } else {
236      p_oth  = NULL;
237      pi_oth = NULL; pj_oth = NULL;
238    }
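    /* annotation (added): this else branch is the value source the analyzer
       tracks -- with size == 1, pi_oth and pj_oth remain NULL for the rest
       of this routine. */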
239
240    /* first, compute symbolic AP = A_loc*P = A_diag*P_loc + A_off*P_oth */
241    /*-------------------------------------------------------------------*/
242    ierr = PetscMalloc1(am+2,&api);CHKERRQ(ierr);
243    ptap->api = api;
244    api[0]    = 0;
245
246    /* create and initialize a linked list */
247    ierr = PetscLLCondensedCreate(pN,pN,&lnk,&lnkbt);CHKERRQ(ierr);
248
249    /* Initial FreeSpace size is fill*(nnz(A)+nnz(P)) */
250    ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(adi[am],PetscIntSumTruncate(aoi[am],pi_loc[pm]))),&free_space);CHKERRQ(ierr);
251    current_space = free_space;
252
253    ierr = MatPreallocateInitialize(comm,am,pn,dnz,onz);CHKERRQ(ierr);
254    for (i=0; i<am; i++) {
255      /* diagonal portion of A */
256      nzi = adi[i+1] - adi[i];
257      for (j=0; j<nzi; j++) {
258        row  = *adj++;
259        pnz  = pi_loc[row+1] - pi_loc[row];
260        Jptr = pj_loc + pi_loc[row];
261        /* add non-zero cols of P into the sorted linked list lnk */
262        ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
263      }
264      /* off-diagonal portion of A */
265      nzi = aoi[i+1] - aoi[i];
266      for (j=0; j<nzi; j++) {
267        row  = *aoj++;
268        pnz  = pi_oth[row+1] - pi_oth[row];
269        Jptr = pj_oth + pi_oth[row];
270        ierr = PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);CHKERRQ(ierr);
271      }
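      /* annotation (added): when pi_oth is NULL (size == 1), aoi[am] is 0, so
         nzi is 0 and this loop body never executes; the dereference of pi_oth
         at line 268 is unreachable in practice. The warning at line 643
         appears to flag the same pattern in a later routine of this file,
         where the analyzer likewise cannot rule it out without that
         invariant. */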
272
273      apnz     = lnk[0];
274      api[i+1] = api[i] + apnz;
275
276      /* if free space is not available, double the total space in the list */
277      if (current_space->local_remaining<apnz) {
278        ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
279        nspacedouble++;
280      }
281
282      /* Copy data into free space, then initialize lnk */
283      ierr = PetscLLCondensedClean(pN,apnz,current_space->array,lnk,lnkbt);CHKERRQ(ierr);
284      ierr = MatPreallocateSet(i+rstart,apnz,current_space->array,dnz,onz);CHKERRQ(ierr);
285
286      current_space->array           += apnz;
287      current_space->local_used      += apnz;
288      current_space->local_remaining -= apnz;
289    }
290
291    /* Allocate space for apj, initialize apj, and */
292    /* destroy list of free space and other temporary array(s) */
293    ierr = PetscMalloc1(api[am]+1,&ptap->apj);CHKERRQ(ierr);
294    apj  = ptap->apj;
295    ierr = PetscFreeSpaceContiguous(&free_space,ptap->apj);CHKERRQ(ierr);
296    ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
297
298    /* malloc apa to store dense row A[i,:]*P */
299    ierr = PetscCalloc1(pN,&apa);CHKERRQ(ierr);
300
301    ptap->apa = apa;
302
303    /* create and assemble symbolic parallel matrix Cmpi */
304    /*----------------------------------------------------*/
305    ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
306    ierr = MatSetSizes(Cmpi,am,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
307    ierr = MatSetBlockSizesFromMats(Cmpi,A,P);CHKERRQ(ierr);
308
309    ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
310    ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
311    ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
312
313    ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat_Symbolic(Cmpi,apj,api);CHKERRQ(ierr);
314    ierr = MatAssemblyBegin(Cmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
315    ierr = MatAssemblyEnd(Cmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
316    ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
317
318    ptap->destroy             = Cmpi->ops->destroy;
319    ptap->duplicate           = Cmpi->ops->duplicate;
320    Cmpi->ops->matmultnumeric = MatMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable;
321    Cmpi->ops->destroy        = MatDestroy_MPIAIJ_MatMatMult;
322    Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
323
324    /* attach the supporting struct to Cmpi for reuse */
325    c     = (Mat_MPIAIJ*)Cmpi->data;
326    c->ap = ptap;
327
328    *C = Cmpi;
329
330    /* set MatInfo */
331    afill = (PetscReal)api[am]/(adi[am]+aoi[am]+pi_loc[pm]+1) + 1.e-5;
332    if (afill < 1.0) afill = 1.0;
333    Cmpi->info.mallocs           = nspacedouble;
334    Cmpi->info.fill_ratio_given  = fill;
335    Cmpi->info.fill_ratio_needed = afill;
336
337  #if defined(PETSC_USE_INFO)
338    if (api[am]) {
339      ierr = PetscInfo3(Cmpi,"Reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)afill);CHKERRQ(ierr);
340      ierr = PetscInfo1(Cmpi,"Use MatMatMult(A,B,MatReuse,%g,&C) for best performance.\n",(double)afill);CHKERRQ(ierr);
341    } else {
342      ierr = PetscInfo(Cmpi,"Empty matrix product\n");CHKERRQ(ierr);
343    }
344  #endif
345    PetscFunctionReturn(0);
346  }
347
348  PETSC_INTERN PetscErrorCode MatMatMult_MPIAIJ_MPIDense(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
349  {
350    PetscErrorCode ierr;
351
352    PetscFunctionBegin;
353    if (scall == MAT_INITIAL_MATRIX) {
354      ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
355      ierr = MatMatMultSymbolic_MPIAIJ_MPIDense(A,B,fill,C);CHKERRQ(ierr);
356      ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
357    }
358    ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
359    ierr = MatMatMultNumeric_MPIAIJ_MPIDense(A,B,*C);CHKERRQ(ierr);
360    ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
361    PetscFunctionReturn(0);
362  }
363
364  typedef struct {
365    Mat         workB;
366    PetscScalar *rvalues,*svalues;
367    MPI_Request *rwaits,*swaits;
368  } MPIAIJ_MPIDense;
369
370  PetscErrorCode MatMPIAIJ_MPIDenseDestroy(void *ctx)
371  {
372    MPIAIJ_MPIDense *contents = (MPIAIJ_MPIDense*)ctx;
373    PetscErrorCode  ierr;
374
375    PetscFunctionBegin;
376    ierr = MatDestroy(&contents->workB);CHKERRQ(ierr);
377    ierr = PetscFree4(contents->rvalues,contents->svalues,contents->rwaits,contents->swaits);CHKERRQ(ierr);
378    ierr = PetscFree(contents);CHKERRQ(ierr);
379    PetscFunctionReturn(0);
380  }
381
382  /*
383    This is a "dummy function" that handles the case where matrix C was created as a dense matrix
384    directly by the user and passed to MatMatMult() with the MAT_REUSE_MATRIX option
385
386    It is the same as MatMatMultSymbolic_MPIAIJ_MPIDense() except that it does not create C
387  */
388  PetscErrorCode MatMatMultNumeric_MPIDense(Mat A,Mat B,Mat C)
389  {
390    PetscErrorCode  ierr;
391    PetscBool       flg;
392    Mat_MPIAIJ      *aij = (Mat_MPIAIJ*)A->data;
393    PetscInt        nz = aij->B->cmap->n,to_n,to_entries,from_n,from_entries;
394    PetscContainer  container;
395    MPIAIJ_MPIDense *contents;
396    VecScatter      ctx = aij->Mvctx;
397
398    PetscFunctionBegin;
399    ierr = PetscObjectTypeCompare((PetscObject)B,MATMPIDENSE,&flg);CHKERRQ(ierr);
400    if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Second matrix must be mpidense");
401
402    /* Handle the case where the user provided the final C matrix rather than calling MatMatMult() with MAT_INITIAL_MATRIX */
403    ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
404    if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"First matrix must be MPIAIJ");
405
406    C->ops->matmultnumeric = MatMatMultNumeric_MPIAIJ_MPIDense;
407
408    ierr = PetscNew(&contents);CHKERRQ(ierr);
409    /* Create work matrix used to store off-processor rows of B needed for local product */
410    ierr = MatCreateSeqDense(PETSC_COMM_SELF,nz,B->cmap->N,NULL,&contents->workB);CHKERRQ(ierr);
411    /* Create work arrays needed */
412    ierr = VecScatterGetRemoteCount_Private(ctx,PETSC_TRUE/*send*/,&to_n,&to_entries);CHKERRQ(ierr);
413    ierr = VecScatterGetRemoteCount_Private(ctx,PETSC_FALSE/*recv*/,&from_n,&from_entries);CHKERRQ(ierr);
414    ierr = PetscMalloc4(B->cmap->N*from_entries,&contents->rvalues,B->cmap->N*to_entries,&contents->svalues,from_n,&contents->rwaits,to_n,&contents->swaits);CHKERRQ(ierr);
415
416    ierr = PetscContainerCreate(PetscObjectComm((PetscObject)A),&container);CHKERRQ(ierr);
417    ierr = PetscContainerSetPointer(container,contents);CHKERRQ(ierr);
418    ierr = PetscContainerSetUserDestroy(container,MatMPIAIJ_MPIDenseDestroy);CHKERRQ(ierr);
419    ierr = PetscObjectCompose((PetscObject)C,"workB",(PetscObject)container);CHKERRQ(ierr);
420    ierr = PetscContainerDestroy(&container);CHKERRQ(ierr);
421
422    ierr = (*C->ops->matmultnumeric)(A,B,C);CHKERRQ(ierr);
423    PetscFunctionReturn(0);
424  }
425
426 PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIDense(Mat A,Mat B,PetscReal fill,Mat *C)
427 {
428   PetscErrorCode  ierr;
429   Mat_MPIAIJ      *aij = (Mat_MPIAIJ*)A->data;
430   PetscInt        nz = aij->B->cmap->n,to_n,to_entries,from_n,from_entries;
431   PetscContainer  container;
432   MPIAIJ_MPIDense *contents;
433   VecScatter      ctx = aij->Mvctx;
434   PetscInt        m = A->rmap->n,n = B->cmap->n;
435
436   PetscFunctionBegin;
437   ierr = MatCreate(PetscObjectComm((PetscObject)B),C);CHKERRQ(ierr);
438   ierr = MatSetSizes(*C,m,n,A->rmap->N,B->cmap->N);CHKERRQ(ierr);
439   ierr = MatSetBlockSizesFromMats(*C,A,B);CHKERRQ(ierr);
440   ierr = MatSetType(*C,MATMPIDENSE);CHKERRQ(ierr);
441   ierr = MatMPIDenseSetPreallocation(*C,NULL);CHKERRQ(ierr);
442   ierr = MatAssemblyBegin(*C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
443   ierr = MatAssemblyEnd(*C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
444
445   (*C)->ops->matmultnumeric = MatMatMultNumeric_MPIAIJ_MPIDense;
446
447   ierr = PetscNew(&contents);CHKERRQ(ierr);
448   /* Create work matrix used to store off-processor rows of B needed for local product */
449   ierr = MatCreateSeqDense(PETSC_COMM_SELF,nz,B->cmap->N,NULL,&contents->workB);CHKERRQ(ierr);
450   /* Create work arrays needed */
451   ierr = VecScatterGetRemoteCount_Private(ctx,PETSC_TRUE/*send*/,&to_n,&to_entries);CHKERRQ(ierr);
452   ierr = VecScatterGetRemoteCount_Private(ctx,PETSC_FALSE/*recv*/,&from_n,&from_entries);CHKERRQ(ierr);
453   ierr = PetscMalloc4(B->cmap->N*from_entries,&contents->rvalues,B->cmap->N*to_entries,&contents->svalues,from_n,&contents->rwaits,to_n,&contents->swaits);CHKERRQ(ierr);
454
455   ierr = PetscContainerCreate(PetscObjectComm((PetscObject)A),&container);CHKERRQ(ierr);
456   ierr = PetscContainerSetPointer(container,contents);CHKERRQ(ierr);
457   ierr = PetscContainerSetUserDestroy(container,MatMPIAIJ_MPIDenseDestroy);CHKERRQ(ierr);
458   ierr = PetscObjectCompose((PetscObject)(*C),"workB",(PetscObject)container);CHKERRQ(ierr);
459   ierr = PetscContainerDestroy(&container);CHKERRQ(ierr);
460   PetscFunctionReturn(0);
461 }
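
The routine above parks its communication buffers on C inside a PetscContainer composed under the key "workB"; MatMPIDenseScatter() below retrieves them with PetscObjectQuery(). A minimal sketch of that compose/query round trip, with a hypothetical payload struct standing in for MPIAIJ_MPIDense (all PetscContainer calls are real API):

      typedef struct { Mat workB; } Payload;   /* hypothetical stand-in */
      Payload        *data;
      PetscContainer cont;

      ierr = PetscNew(&data);CHKERRQ(ierr);
      ierr = PetscContainerCreate(PETSC_COMM_SELF,&cont);CHKERRQ(ierr);
      ierr = PetscContainerSetPointer(cont,data);CHKERRQ(ierr);
      ierr = PetscObjectCompose((PetscObject)C,"workB",(PetscObject)cont);CHKERRQ(ierr);
      ierr = PetscContainerDestroy(&cont);CHKERRQ(ierr); /* C keeps its own reference */

      /* later, e.g. at the start of the numeric phase */
      ierr = PetscObjectQuery((PetscObject)C,"workB",(PetscObject*)&cont);CHKERRQ(ierr);
      if (!cont) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Container does not exist");
      ierr = PetscContainerGetPointer(cont,(void**)&data);CHKERRQ(ierr);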
462
463 /*
464     Performs an efficient scatter on the rows of B needed by this process; this is
465     a modification of the VecScatterBegin_() routines.
466 */
467 PetscErrorCode MatMPIDenseScatter(Mat A,Mat B,Mat C,Mat *outworkB)
468 {
469   Mat_MPIAIJ        *aij = (Mat_MPIAIJ*)A->data;
470   PetscErrorCode    ierr;
471   PetscScalar       *b,*w,*svalues,*rvalues;
472   VecScatter        ctx = aij->Mvctx;
473   PetscInt          i,j,k;
474   const PetscInt    *sindices,*sstarts,*rindices,*rstarts;
475   const PetscMPIInt *sprocs,*rprocs;
476   PetscInt          nsends,nrecvs,nrecvs2;
477   MPI_Request       *swaits,*rwaits;
478   MPI_Comm          comm;
479   PetscMPIInt       tag = ((PetscObject)ctx)->tag,ncols = B->cmap->N,nrows = aij->B->cmap->n,imdex,nrowsB = B->rmap->n,nsends_mpi,nrecvs_mpi;
480   MPI_Status        status;
481   MPIAIJ_MPIDense   *contents;
482   PetscContainer    container;
483   Mat               workB;
484
485   PetscFunctionBegin;
486   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
487   ierr = PetscObjectQuery((PetscObject)C,"workB",(PetscObject*)&container);CHKERRQ(ierr);
488   if (!container) SETERRQ(comm,PETSC_ERR_PLIB,"Container does not exist");
489   ierr = PetscContainerGetPointer(container,(void**)&contents);CHKERRQ(ierr);
490
491   workB = *outworkB = contents->workB;
492   if (nrows != workB->rmap->n) SETERRQ2(comm,PETSC_ERR_PLIB,"Number of rows of workB %D not equal to columns of aij->B %D",workB->rmap->n,nrows);
493   ierr = VecScatterGetRemote_Private(ctx,PETSC_TRUE/*send*/,&nsends,&sstarts,&sindices,&sprocs,NULL/*bs*/);CHKERRQ(ierr);
494   ierr = VecScatterGetRemoteOrdered_Private(ctx,PETSC_FALSE/*recv*/,&nrecvs,&rstarts,&rindices,&rprocs,NULL/*bs*/);CHKERRQ(ierr);
495   ierr = PetscMPIIntCast(nsends,&nsends_mpi);CHKERRQ(ierr);
496   ierr = PetscMPIIntCast(nrecvs,&nrecvs_mpi);CHKERRQ(ierr);
497   svalues = contents->svalues;
498   rvalues = contents->rvalues;
499   swaits  = contents->swaits;
500   rwaits  = contents->rwaits;
501
502   ierr = MatDenseGetArray(B,&b);CHKERRQ(ierr);
503   ierr = MatDenseGetArray(workB,&w);CHKERRQ(ierr);
504
505   for (i=0; i<nrecvs; i++) {
506     ierr = MPI_Irecv(rvalues+ncols*(rstarts[i]-rstarts[0]),ncols*(rstarts[i+1]-rstarts[i]),MPIU_SCALAR,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr);
507   }
508
509   for (i=0; i<nsends; i++) {
510     /* pack a message at a time */
511     for (j=0; j<sstarts[i+1]-sstarts[i]; j++) {
512       for (k=0; k<ncols; k++) {
513         svalues[ncols*(sstarts[i]-sstarts[0]+j) + k] = b[sindices[sstarts[i]+j] + nrowsB*k];
514       }
515     }
516     ierr = MPI_Isend(svalues+ncols*(sstarts[i]-sstarts[0]),ncols*(sstarts[i+1]-sstarts[i]),MPIU_SCALAR,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr);
517   }
518
519   nrecvs2 = nrecvs;
520   while (nrecvs2) {
521     ierr = MPI_Waitany(nrecvs_mpi,rwaits,&imdex,&status);CHKERRQ(ierr);
522     nrecvs2--;
523     /* unpack a message at a time */
524     for (j=0; j<rstarts[imdex+1]-rstarts[imdex]; j++) {
525       for (k=0; k<ncols; k++) {
526         w[rindices[rstarts[imdex]+j] + nrows*k] = rvalues[ncols*(rstarts[imdex]-rstarts[0]+j) + k];
527       }
528     }
529   }
530   if (nsends) {ierr = MPI_Waitall(nsends_mpi,swaits,MPI_STATUSES_IGNORE);CHKERRQ(ierr);}
531
532   ierr = VecScatterRestoreRemote_Private(ctx,PETSC_TRUE/*send*/,&nsends,&sstarts,&sindices,&sprocs,NULL/*bs*/);CHKERRQ(ierr);
533   ierr = VecScatterRestoreRemoteOrdered_Private(ctx,PETSC_FALSE/*recv*/,&nrecvs,&rstarts,&rindices,&rprocs,NULL/*bs*/);CHKERRQ(ierr);
534   ierr = MatDenseRestoreArray(B,&b);CHKERRQ(ierr);
535   ierr = MatDenseRestoreArray(workB,&w);CHKERRQ(ierr);
536   ierr = MatAssemblyBegin(workB,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
537   ierr = MatAssemblyEnd(workB,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
538   PetscFunctionReturn(0);
539
540 }
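
The pack loop at lines 511-515 is the heart of the scatter: MatDenseGetArray() returns the values in column-major order, so entry (r,c) of B lives at b[r + nrowsB*c], and each requested row is copied column by column into a row-contiguous send buffer. A self-contained sketch of that gather, with the hypothetical names rows/npack standing in for sindices/sstarts:

      /* Gather selected rows of a column-major nrowsB-by-ncols array into a
         buffer in which each packed row is contiguous (cf. lines 511-515). */
      static void PackRows(const PetscScalar *b,PetscInt nrowsB,PetscInt ncols,
                           const PetscInt *rows,PetscInt npack,PetscScalar *buf)
      {
        PetscInt j,k;
        for (j=0; j<npack; j++) {
          for (k=0; k<ncols; k++) {
            buf[ncols*j + k] = b[rows[j] + nrowsB*k]; /* (row,col) -> b[row + nrowsB*col] */
          }
        }
      }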
541 extern PetscErrorCode MatMatMultNumericAdd_SeqAIJ_SeqDense(Mat,Mat,Mat);
542
543 PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIDense(Mat A,Mat B,Mat C)
544 {
545   PetscErrorCode ierr;
546   Mat_MPIAIJ     *aij    = (Mat_MPIAIJ*)A->data;
547   Mat_MPIDense   *bdense = (Mat_MPIDense*)B->data;
548   Mat_MPIDense   *cdense = (Mat_MPIDense*)C->data;
549   Mat            workB;
550
551   PetscFunctionBegin;
552   /* diagonal block of A times all local rows of B */
553   ierr = MatMatMultNumeric_SeqAIJ_SeqDense(aij->A,bdense->A,cdense->A);CHKERRQ(ierr);
554
555   /* get off-processor parts of B needed to complete the product */
556   ierr = MatMPIDenseScatter(A,B,C,&workB);CHKERRQ(ierr);
557
558   /* off-diagonal block of A times nonlocal rows of B */
559   ierr = MatMatMultNumericAdd_SeqAIJ_SeqDense(aij->B,workB,cdense->A);CHKERRQ(ierr);
560   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
561   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
562   PetscFunctionReturn(0);
563 }
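
In matrix terms (an editor's summary of lines 552-559): splitting the local rows of A into the diagonal block A_d and the off-diagonal block A_o, the local rows of the product are

      C_loc = A_d * B_loc + A_o * B_oth

where B_loc holds the locally owned rows of B and B_oth (the workB produced by the scatter) holds the remote rows of B that the columns of A_o touch.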
564
565 PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ(Mat A,Mat P,Mat C)
566 {
567   PetscErrorCode ierr;
568   Mat_MPIAIJ     *a  = (Mat_MPIAIJ*)A->data,*c = (Mat_MPIAIJ*)C->data;
569   Mat_SeqAIJ     *ad = (Mat_SeqAIJ*)(a->A)->data,*ao = (Mat_SeqAIJ*)(a->B)->data;
570   Mat_SeqAIJ     *cd = (Mat_SeqAIJ*)(c->A)->data,*co = (Mat_SeqAIJ*)(c->B)->data;
571   PetscInt       *adi = ad->i,*adj,*aoi = ao->i,*aoj;
572   PetscScalar    *ada,*aoa,*cda = cd->a,*coa = co->a;
573   Mat_SeqAIJ     *p_loc,*p_oth;
574   PetscInt       *pi_loc,*pj_loc,*pi_oth,*pj_oth,*pj;
575   PetscScalar    *pa_loc,*pa_oth,*pa,valtmp,*ca;
576   PetscInt       cm = C->rmap->n,anz,pnz;
577   Mat_APMPI      *ptap = c->ap;
578   PetscScalar    *apa_sparse;
579   PetscInt       *api,*apj,*apJ,i,j,k,row;
580   PetscInt       cstart = C->cmap->rstart;
581   PetscInt       cdnz,conz,k0,k1,nextp;
582   MPI_Comm       comm;
583   PetscMPIInt    size;
584
585   PetscFunctionBegin;
586   ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
587   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
588
589   if (!ptap) {
          [1] Assuming 'ptap' is non-null
          [2] Taking false branch
590     SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"AP cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
591   }
592   apa_sparse = ptap->apa;
593
594   /* 1) get P_oth = ptap->P_oth and P_loc = ptap->P_loc */
595   /*-----------------------------------------------------*/
596   /* update numerical values of P_oth and P_loc */
597   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
598   ierr = MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
599
600   /* 2) compute numeric C_loc = A_loc*P = Ad*P_loc + Ao*P_oth */
601   /*----------------------------------------------------------*/
602   /* get data from symbolic products */
603   p_loc  = (Mat_SeqAIJ*)(ptap->P_loc)->data;
604   pi_loc = p_loc->i; pj_loc = p_loc->j; pa_loc = p_loc->a;
605   if (size > 1) {
          [3] Assuming 'size' is <= 1
          [4] Taking false branch
606     p_oth  = (Mat_SeqAIJ*)(ptap->P_oth)->data;
607     pi_oth = p_oth->i; pj_oth = p_oth->j; pa_oth = p_oth->a;
608   } else {
609     p_oth = NULL; pi_oth = NULL; pj_oth = NULL; pa_oth = NULL;
          [5] Null pointer value stored to 'pi_oth'
610   }
611
612   api = ptap->api;
613   apj = ptap->apj;
614   for (i=0; i<cm; i++) {
          [6] Assuming 'i' is < 'cm'    [7] Loop condition is true. Entering loop body
          [17] Assuming 'i' is < 'cm'   [18] Loop condition is true. Entering loop body
615     apJ = apj + api[i];
616
617     /* diagonal portion of A */
618     anz = adi[i+1] - adi[i];
619     adj = ad->j + adi[i];
620     ada = ad->a + adi[i];
621     for (j=0; j<anz; j++) {
          [8] Assuming 'j' is >= 'anz'    [9] Loop condition is false. Execution continues on line 638
          [19] Assuming 'j' is >= 'anz'   [20] Loop condition is false. Execution continues on line 638
622       row = adj[j];
623       pnz = pi_loc[row+1] - pi_loc[row];
624       pj  = pj_loc + pi_loc[row];
625       pa  = pa_loc + pi_loc[row];
626       /* perform sparse axpy */
627       valtmp = ada[j];
628       nextp  = 0;
629       for (k=0; nextp<pnz; k++) {
630         if (apJ[k] == pj[nextp]) { /* column of AP == column of P */
631           apa_sparse[k] += valtmp*pa[nextp++];
632         }
633       }
634       ierr = PetscLogFlops(2.0*pnz);CHKERRQ(ierr);
635     }
636
637     /* off-diagonal portion of A */
638     anz = aoi[i+1] - aoi[i];
639     aoj = ao->j + aoi[i];
640     aoa = ao->a + aoi[i];
641     for (j=0; j<anz; j++) {
          [10] Assuming 'j' is >= 'anz'   [11] Loop condition is false. Execution continues on line 658
          [21] Assuming 'j' is < 'anz'    [22] Loop condition is true. Entering loop body
642       row = aoj[j];
643       pnz = pi_oth[row+1] - pi_oth[row];
          [23] Array access (from variable 'pi_oth') results in a null pointer dereference
644       pj  = pj_oth + pi_oth[row];
645       pa  = pa_oth + pi_oth[row];
646       /* perform sparse axpy */
647       valtmp = aoa[j];
648       nextp  = 0;
649       for (k=0; nextp<pnz; k++) {
650         if (apJ[k] == pj[nextp]) { /* column of AP == column of P */
651           apa_sparse[k] += valtmp*pa[nextp++];
652         }
653       }
654       ierr = PetscLogFlops(2.0*pnz);CHKERRQ(ierr);
655     }
656
657     /* set values in C */
658     cdnz = cd->i[i+1] - cd->i[i];
659     conz = co->i[i+1] - co->i[i];
660
661     /* 1st off-diagonal part of C */
662     ca = coa + co->i[i];
663     k  = 0;
664     for (k0=0; k0<conz; k0++) {
          [12] Assuming 'k0' is >= 'conz'   [13] Loop condition is false. Execution continues on line 672
665       if (apJ[k] >= cstart) break;
666       ca[k0] = apa_sparse[k];
667       apa_sparse[k] = 0.0;
668       k++;
669     }
670
671     /* diagonal part of C */
672     ca = cda + cd->i[i];
673     for (k1=0; k1<cdnz; k1++) {
          [14] Assuming 'k1' is >= 'cdnz'   [15] Loop condition is false. Execution continues on line 680
674       ca[k1] = apa_sparse[k];
675       apa_sparse[k] = 0.0;
676       k++;
677     }
678
679     /* 2nd off-diagonal part of C */
680     ca = coa + co->i[i];
681     for (; k0<conz; k0++) {
          [16] Loop condition is false. Execution continues on line 614
682       ca[k0] = apa_sparse[k];
683       apa_sparse[k] = 0.0;
684       k++;
685     }
686   }
687   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
688   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
689
690   if (ptap->freestruct) {
691     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
692   }
693   PetscFunctionReturn(0);
694 }
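
About the warning at line 643: the reported path assumes size <= 1 at line 605 (so pi_oth is set to NULL at line 609, step 5) and then a nonempty off-diagonal row at line 641 (steps 21-22). On a single process the off-diagonal block ao holds no entries, so anz is always 0 there and the two assumptions cannot hold together; the analyzer simply cannot see that invariant. A defensive rewrite of lines 637-655 that encodes it, sketched under that assumption (not the upstream fix):

      /* Guard the off-diagonal pass so pi_oth is dereferenced only when
         P_oth was built (size > 1); with one process ao has no entries,
         so no work is skipped. */
      if (size > 1) {
        anz = aoi[i+1] - aoi[i];
        aoj = ao->j + aoi[i];
        aoa = ao->a + aoi[i];
        for (j=0; j<anz; j++) {
          row    = aoj[j];
          pnz    = pi_oth[row+1] - pi_oth[row];
          pj     = pj_oth + pi_oth[row];
          pa     = pa_oth + pi_oth[row];
          valtmp = aoa[j];
          nextp  = 0;
          for (k=0; nextp<pnz; k++) {
            if (apJ[k] == pj[nextp]) apa_sparse[k] += valtmp*pa[nextp++];
          }
          ierr = PetscLogFlops(2.0*pnz);CHKERRQ(ierr);
        }
      }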
695
696 /* same as MatMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(), except using LLCondensed to avoid O(BN) memory requirement */
697 PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ(Mat A,Mat P,PetscReal fill,Mat *C)
698 {
699   PetscErrorCode     ierr;
700   MPI_Comm           comm;
701   PetscMPIInt        size;
702   Mat                Cmpi;
703   Mat_APMPI          *ptap;
704   PetscFreeSpaceList free_space = NULL,current_space = NULL;
705   Mat_MPIAIJ         *a = (Mat_MPIAIJ*)A->data,*c;
706   Mat_SeqAIJ         *ad = (Mat_SeqAIJ*)(a->A)->data,*ao = (Mat_SeqAIJ*)(a->B)->data,*p_loc,*p_oth;
707   PetscInt           *pi_loc,*pj_loc,*pi_oth,*pj_oth,*dnz,*onz;
708   PetscInt           *adi = ad->i,*adj = ad->j,*aoi = ao->i,*aoj = ao->j,rstart = A->rmap->rstart;
709   PetscInt           i,pnz,row,*api,*apj,*Jptr,apnz,nspacedouble = 0,j,nzi,*lnk,apnz_max = 0;
710   PetscInt           am = A->rmap->n,pn = P->cmap->n,pm = P->rmap->n,lsize = pn+20;
711   PetscReal          afill;
712   PetscScalar        *apa;
713   MatType            mtype;
714
715   PetscFunctionBegin;
716   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
717   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
718
719   /* create struct Mat_APMPI and attach it to C later */
720   ierr = PetscNew(&ptap);CHKERRQ(ierr);
721
722   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
723   ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr);
724
725   /* get P_loc by taking all local rows of P */
726   ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&ptap->P_loc);CHKERRQ(ierr);
727
728   p_loc  = (Mat_SeqAIJ*)(ptap->P_loc)->data;
729   pi_loc = p_loc->i; pj_loc = p_loc->j;
730   if (size > 1) {
731     p_oth  = (Mat_SeqAIJ*)(ptap->P_oth)->data;
732     pi_oth = p_oth->i; pj_oth = p_oth->j;
733   } else {
734     p_oth  = NULL;
735     pi_oth = NULL; pj_oth = NULL;
736   }
737
738   /* first, compute symbolic AP = A_loc*P = A_diag*P_loc + A_off*P_oth */
739   /*-------------------------------------------------------------------*/
740   ierr = PetscMalloc1(am+2,&api);CHKERRQ(ierr);
741   ptap->api = api;
742   api[0]    = 0;
743
744   ierr = PetscLLCondensedCreate_Scalable(lsize,&lnk);CHKERRQ(ierr);
745
746   /* Initial FreeSpace size is fill*(nnz(A)+nnz(P)) */
747   ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(adi[am],PetscIntSumTruncate(aoi[am],pi_loc[pm]))),&free_space);CHKERRQ(ierr);
748   current_space = free_space;
749   ierr = MatPreallocateInitialize(comm,am,pn,dnz,onz);CHKERRQ(ierr);
750   for (i=0; i<am; i++) {
751     /* diagonal portion of A */
752     nzi = adi[i+1] - adi[i];
753     for (j=0; j<nzi; j++) {
754       row  = *adj++;
755       pnz  = pi_loc[row+1] - pi_loc[row];
756       Jptr = pj_loc + pi_loc[row];
757       /* Expand list if it is not long enough */
758       if (pnz+apnz_max > lsize) {
759         lsize = pnz+apnz_max;
760         ierr  = PetscLLCondensedExpand_Scalable(lsize,&lnk);CHKERRQ(ierr);
761       }
762       /* add non-zero cols of P into the sorted linked list lnk */
763       ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
764       apnz     = *lnk; /* The first element in the list is the number of items in the list */
765       api[i+1] = api[i] + apnz;
766       if (apnz > apnz_max) apnz_max = apnz;
767     }
768     /* off-diagonal portion of A */
769     nzi = aoi[i+1] - aoi[i];
770     for (j=0; j<nzi; j++) {
771       row  = *aoj++;
772       pnz  = pi_oth[row+1] - pi_oth[row];
773       Jptr = pj_oth + pi_oth[row];
774       /* Expand list if it is not long enough */
775       if (pnz+apnz_max > lsize) {
776         lsize = pnz + apnz_max;
777         ierr  = PetscLLCondensedExpand_Scalable(lsize,&lnk);CHKERRQ(ierr);
778       }
779       /* add non-zero cols of P into the sorted linked list lnk */
780       ierr = PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);CHKERRQ(ierr);
781       apnz     = *lnk; /* The first element in the list is the number of items in the list */
782       api[i+1] = api[i] + apnz;
783       if (apnz > apnz_max) apnz_max = apnz;
784     }
785     apnz     = *lnk;
786     api[i+1] = api[i] + apnz;
787     if (apnz > apnz_max) apnz_max = apnz;
788
789     /* if free space is not available, double the total space in the list */
790     if (current_space->local_remaining<apnz) {
791       ierr = PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);CHKERRQ(ierr);
792       nspacedouble++;
793     }
794
795     /* Copy data into free space, then initialize lnk */
796     ierr = PetscLLCondensedClean_Scalable(apnz,current_space->array,lnk);CHKERRQ(ierr);
797     ierr = MatPreallocateSet(i+rstart,apnz,current_space->array,dnz,onz);CHKERRQ(ierr);
798
799     current_space->array           += apnz;
800     current_space->local_used      += apnz;
801     current_space->local_remaining -= apnz;
802   }
803
804   /* Allocate space for apj, initialize apj, and */
805   /* destroy list of free space and other temporary array(s) */
806   ierr = PetscMalloc1(api[am]+1,&ptap->apj);CHKERRQ(ierr);
807   apj  = ptap->apj;
808   ierr = PetscFreeSpaceContiguous(&free_space,ptap->apj);CHKERRQ(ierr);
809   ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
810
811   /* create and assemble symbolic parallel matrix Cmpi */
812   /*----------------------------------------------------*/
813   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
814   ierr = MatSetSizes(Cmpi,am,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
815   ierr = MatSetBlockSizesFromMats(Cmpi,A,P);CHKERRQ(ierr);
816   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
817   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
818   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
819
820   /* malloc apa for assembly Cmpi */
821   ierr = PetscCalloc1(apnz_max,&apa);CHKERRQ(ierr);
822   ptap->apa = apa;
823
824   ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat_Symbolic(Cmpi,apj,api);CHKERRQ(ierr);
825   ierr = MatAssemblyBegin(Cmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
826   ierr = MatAssemblyEnd(Cmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
827   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
828
829   ptap->destroy   = Cmpi->ops->destroy;
830   ptap->duplicate = Cmpi->ops->duplicate;
831   Cmpi->ops->matmultnumeric = MatMatMultNumeric_MPIAIJ_MPIAIJ;
832   Cmpi->ops->destroy        = MatDestroy_MPIAIJ_MatMatMult;
833   Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
834
835   /* attach the supporting struct to Cmpi for reuse */
836   c     = (Mat_MPIAIJ*)Cmpi->data;
837   c->ap = ptap;
838   *C    = Cmpi;
839
840   /* set MatInfo */
841   afill = (PetscReal)api[am]/(adi[am]+aoi[am]+pi_loc[pm]+1) + 1.e-5;
842   if (afill < 1.0) afill = 1.0;
843   Cmpi->info.mallocs           = nspacedouble;
844   Cmpi->info.fill_ratio_given  = fill;
845   Cmpi->info.fill_ratio_needed = afill;
846
847 #if defined(PETSC_USE_INFO)
848   if (api[am]) {
849     ierr = PetscInfo3(Cmpi,"Reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)afill);CHKERRQ(ierr);
850     ierr = PetscInfo1(Cmpi,"Use MatMatMult(A,B,MatReuse,%g,&C) for best performance.;\n",(double)afill);CHKERRQ(ierr);
851   } else {
852     ierr = PetscInfo(Cmpi,"Empty matrix product\n");CHKERRQ(ierr);
853   }
854 #endif
855   PetscFunctionReturn(0);
856 }
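
The fill statistics at lines 841-845 estimate nnz(AP) relative to nnz(A) + nnz(P_loc); the +1 guards against division by zero on an empty product. A worked instance with hypothetical counts: if the local rows of A hold adi[am]+aoi[am] = 1000 nonzeros, P_loc holds pi_loc[pm] = 500, and the symbolic product ends with api[am] = 3000 entries, then

      afill = 3000/(1000 + 500 + 1) + 1e-5 ~ 2.0

so passing fill ~ 2.0 on a later call with the same sparsity should avoid the reallocations counted in nspacedouble, as the PetscInfo message at line 850 suggests.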
857
858 /* This function is needed for the seqMPI matrix-matrix multiplication.     */
859 /* Three sorted input arrays are merged into one sorted output array, whose */
860 /* size is returned in size4. Duplicate entries show up only once in out.   */
861 static void Merge3SortedArrays(PetscInt size1, PetscInt *in1,
862                                PetscInt size2, PetscInt *in2,
863                                PetscInt size3, PetscInt *in3,
864                                PetscInt *size4, PetscInt *out)
865 {
866   int i = 0, j = 0, k = 0, l = 0;
867
868   /* Traverse all three arrays */
869   while (i<size1 && j<size2 && k<size3) {
870     if (in1[i] < in2[j] && in1[i] < in3[k]) {
871       out[l++] = in1[i++];
872     }
873     else if (in2[j] < in1[i] && in2[j] < in3[k]) {
874       out[l++] = in2[j++];
875     }
876     else if (in3[k] < in1[i] && in3[k] < in2[j]) {
877       out[l++] = in3[k++];
878     }
879     else if (in1[i] == in2[j] && in1[i] < in3[k]) {
880       out[l++] = in1[i];
881       i++, j++;
882     }
883     else if (in1[i] == in3[k] && in1[i] < in2[j]) {
884       out[l++] = in1[i];
885       i++, k++;
886     }
887     else if (in3[k] == in2[j] && in2[j] < in1[i]) {
888       out[l++] = in2[j];
889       k++, j++;
890     }
891     else if (in1[i] == in2[j] && in1[i] == in3[k]) {
892       out[l++] = in1[i];
893       i++, j++, k++;
894     }
895   }
896
897   /* Traverse two remaining arrays */
898   while (i<size1 && j<size2) {
899     if (in1[i] < in2[j]) {
900       out[l++] = in1[i++];
901     }
902     else if (in1[i] > in2[j]) {
903       out[l++] = in2[j++];
904     }
905     else {
906       out[l++] = in1[i];
907       i++, j++;
908     }
909   }
910
911   while (i<size1 && k<size3) {
912     if (in1[i] < in3[k]) {
913       out[l++] = in1[i++];
914     }
915     else if (in1[i] > in3[k]) {
916       out[l++] = in3[k++];
917     }
918     else {
919       out[l++] = in1[i];
920       i++, k++;
921     }
922   }
923
924   while (k<size3 && j<size2) {
925     if (in3[k] < in2[j]) {
926       out[l++] = in3[k++];
927     }
928     else if (in3[k] > in2[j]) {
929       out[l++] = in2[j++];
930     }
931     else {
932       out[l++] = in3[k];
933       k++, j++;
934     }
935   }
936
937   /* Traverse one remaining array */
938   while (i<size1) out[l++] = in1[i++];
939   while (j<size2) out[l++] = in2[j++];
940   while (k<size3) out[l++] = in3[k++];
941
942   *size4 = l;
943 }
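
A minimal usage sketch of the merge (hypothetical data; in the worst case out must be able to hold size1+size2+size3 entries):

      PetscInt in1[] = {1,3,5}, in2[] = {2,3,7}, in3[] = {1,4,5};
      PetscInt out[9], n;
      Merge3SortedArrays(3,in1,3,in2,3,in3,&n,out);
      /* out = {1,2,3,4,5,7}, n = 6: values shared across inputs appear once */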
944
945 /* This matrix-matrix multiplication algorithm divides the multiplication into three multiplications and */
946 /* adds up the products. Two of these three multiplications are performed with existing (sequential)     */
947 /* matrix-matrix multiplications.                                                                         */
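
In symbols (an editor's summary of the routine below): with the local rows of A split into the diagonal block A_d and off-diagonal block A_o, and the local rows of P split column-wise into P_d and P_o, the local rows of the product are

      A*P = A_d*P_d + A_d*P_o + A_o*P_oth

A_d*P_d (adpd) and A_o*P_oth (aopoth) come from MatMatMultSymbolic_SeqAIJ_SeqAIJ(); A_d*P_o is built here with the condensed linked list, and the three sorted j-arrays are merged row by row with Merge3SortedArrays() above.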
948PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_seqMPI(Mat A, Mat P, PetscReal fill, Mat *C)
949{
950 PetscErrorCode ierr;
951 MPI_Comm comm;
952 PetscMPIInt size;
953 Mat Cmpi;
954 Mat_APMPI *ptap;
955 PetscFreeSpaceList free_space_diag=NULL((void*)0), current_space=NULL((void*)0);
956 Mat_MPIAIJ *a =(Mat_MPIAIJ*)A->data;
957 Mat_SeqAIJ *ad =(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_loc;
958 Mat_MPIAIJ *p =(Mat_MPIAIJ*)P->data;
959 Mat_MPIAIJ *c;
960 Mat_SeqAIJ *adpd_seq, *p_off, *aopoth_seq;
961 PetscInt adponz, adpdnz;
962 PetscInt *pi_loc,*dnz,*onz;
963 PetscInt *adi=ad->i,*adj=ad->j,*aoi=ao->i,rstart=A->rmap->rstart;
964 PetscInt *lnk,i, i1=0,pnz,row,*adpoi,*adpoj, *api, *adpoJ, *aopJ, *apJ,*Jptr, aopnz, nspacedouble=0,j,nzi,
965 *apj,apnz, *adpdi, *adpdj, *adpdJ, *poff_i, *poff_j, *j_temp, *aopothi, *aopothj;
966 PetscInt am=A->rmap->n,pN=P->cmap->N,pn=P->cmap->n,pm=P->rmap->n, p_colstart, p_colend;
967 PetscBT lnkbt;
968 PetscScalar *apa;
969 PetscReal afill;
970 PetscMPIInt rank;
971 Mat adpd, aopoth;
972 MatType mtype;
973 const char *prefix;
974
975 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
; petscstack->line[petscstack->currentsize] = 975; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
976 ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),976,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
977 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),977,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
978 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),978,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
979 ierr = MatGetOwnershipRangeColumn(P, &p_colstart, &p_colend); CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),979,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
980
981 /* create struct Mat_APMPI and attached it to C later */
982 ierr = PetscNew(&ptap)PetscMallocA(1,PETSC_TRUE,982,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(1)*sizeof(**((&ptap))),((&ptap)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),982,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
983
984 /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
985 ierr = MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),985,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
986
987 /* get P_loc by taking all local rows of P */
988 ierr = MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&ptap->P_loc);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),988,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
989
990
991 p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
992 pi_loc = p_loc->i;
993
994 /* Allocate memory for the i arrays of the matrices A*P, A_diag*P_off and A_offd * P */
995 ierr = PetscMalloc1(am+2,&api)PetscMallocA(1,PETSC_FALSE,995,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(am+2)*sizeof(**(&api)),(&api))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),995,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
996 ierr = PetscMalloc1(am+2,&adpoi)PetscMallocA(1,PETSC_FALSE,996,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(am+2)*sizeof(**(&adpoi)),(&adpoi))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),996,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
997
998 adpoi[0] = 0;
999 ptap->api = api;
1000 api[0] = 0;
1001
1002 /* create and initialize a linked list, will be used for both A_diag * P_loc_off and A_offd * P_oth */
1003 ierr = PetscLLCondensedCreate(pN,pN,&lnk,&lnkbt);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1003,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1004 ierr = MatPreallocateInitialize(comm,am,pn,dnz,onz)0; { PetscErrorCode _4_ierr; PetscInt __nrows = (am),__ncols =
(pn),__rstart,__start,__end; _4_ierr = PetscMallocA(2,PETSC_TRUE
,1004,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)((size_t)__nrows)*sizeof(**(&dnz)),(&dnz),(size_t
)((size_t)__nrows)*sizeof(**(&onz)),(&onz));do {if (__builtin_expect
(!!(_4_ierr),0)) return PetscError(((MPI_Comm)0x44000001),1004
,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,_4_ierr,PETSC_ERROR_REPEAT," ");} while (0); __start = 0; __end
= __start; _4_ierr = MPI_Scan(&__ncols,&__end,1,((MPI_Datatype
)0x4c000405),(MPI_Op)(0x58000003),comm);do {if (__builtin_expect
(!!(_4_ierr),0)) return PetscError(((MPI_Comm)0x44000001),1004
,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,_4_ierr,PETSC_ERROR_REPEAT," ");} while (0); __start = __end
- __ncols; _4_ierr = MPI_Scan(&__nrows,&__rstart,1,(
(MPI_Datatype)0x4c000405),(MPI_Op)(0x58000003),comm);do {if (
__builtin_expect(!!(_4_ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1004,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,_4_ierr,PETSC_ERROR_REPEAT," ");} while (0); __rstart = __rstart
- __nrows;
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1004,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1005
1006 /* Symbolic calc of A_loc_diag * P_loc_diag */
1007 ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1007,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1008 ierr = MatSetOptionsPrefix(a->A,prefix);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1008,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1009 ierr = MatAppendOptionsPrefix(a->A,"inner_diag_");CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1009,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1010 ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(a->A, p->A, fill, &adpd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1010,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1011 adpd_seq = (Mat_SeqAIJ*)((adpd)->data);
1012 adpdi = adpd_seq->i; adpdj = adpd_seq->j;
1013 p_off = (Mat_SeqAIJ*)((p->B)->data);
1014 poff_i = p_off->i; poff_j = p_off->j;
1015
1016 /* j_temp stores indices of a result row before they are added to the linked list */
1017 ierr = PetscMalloc1(pN+2,&j_temp)PetscMallocA(1,PETSC_FALSE,1017,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(pN+2)*sizeof(**(&j_temp)),(&j_temp))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1017,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1018
1019
1020 /* Symbolic calc of the A_diag * p_loc_off */
1021 /* Initial FreeSpace size is fill*(nnz(A)+nnz(P)) */
1022 ierr = PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(adi[am],PetscIntSumTruncate(aoi[am],pi_loc[pm]))),&free_space_diag);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1022,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1023 current_space = free_space_diag;
1024
1025 for (i=0; i<am; i++) {
1026 /* A_diag * P_loc_off */
1027 nzi = adi[i+1] - adi[i];
1028 for (j=0; j<nzi; j++) {
1029 row = *adj++;
1030 pnz = poff_i[row+1] - poff_i[row];
1031 Jptr = poff_j + poff_i[row];
1032 for(i1 = 0; i1 < pnz; i1++) {
1033 j_temp[i1] = p->garray[Jptr[i1]];
1034 }
1035 /* add non-zero cols of P into the sorted linked list lnk */
1036 ierr = PetscLLCondensedAddSorted(pnz,j_temp,lnk,lnkbt);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1036,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1037 }
1038
1039 adponz = lnk[0];
1040 adpoi[i+1] = adpoi[i] + adponz;
1041
1042 /* if free space is not available, double the total space in the list */
1043 if (current_space->local_remaining<adponz) {
1044 ierr = PetscFreeSpaceGet(PetscIntSumTruncate(adponz,current_space->total_array_size),&current_space);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1044,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1045 nspacedouble++;
1046 }
1047
1048 /* Copy data into free space, then initialize lnk */
1049 ierr = PetscLLCondensedClean(pN,adponz,current_space->array,lnk,lnkbt);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1049,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1050
1051 current_space->array += adponz;
1052 current_space->local_used += adponz;
1053 current_space->local_remaining -= adponz;
1054 }
1055
1056 /* Symbolic calc of A_off * P_oth */
1057   ierr = MatSetOptionsPrefix(a->B,prefix);CHKERRQ(ierr);
1058   ierr = MatAppendOptionsPrefix(a->B,"inner_offdiag_");CHKERRQ(ierr);
1059   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(a->B, ptap->P_oth, fill, &aopoth);CHKERRQ(ierr);
1060 aopoth_seq = (Mat_SeqAIJ*)((aopoth)->data);
1061 aopothi = aopoth_seq->i; aopothj = aopoth_seq->j;
1062
1063 /* Allocate space for apj, adpj, aopj, ... */
1064 /* destroy lists of free space and other temporary array(s) */
1065
1066   ierr = PetscMalloc1(aopothi[am] + adpoi[am] + adpdi[am]+2, &ptap->apj);CHKERRQ(ierr);
1067   ierr = PetscMalloc1(adpoi[am]+2, &adpoj);CHKERRQ(ierr);
1068
1069 /* Copy from linked list to j-array */
1070   ierr = PetscFreeSpaceContiguous(&free_space_diag,adpoj);CHKERRQ(ierr);
1071   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
1072
1073 adpoJ = adpoj;
1074 adpdJ = adpdj;
1075 aopJ = aopothj;
1076 apj = ptap->apj;
1077 apJ = apj; /* still empty */
1078
1079 /* Merge j-arrays of A_off * P, A_diag * P_loc_off, and */
1080 /* A_diag * P_loc_diag to get A*P */
1081 for (i = 0; i < am; i++) {
1082 aopnz = aopothi[i+1] - aopothi[i];
1083 adponz = adpoi[i+1] - adpoi[i];
1084 adpdnz = adpdi[i+1] - adpdi[i];
1085
1086 /* Correct indices from A_diag*P_diag */
1087     for (i1 = 0; i1 < adpdnz; i1++) {
1088 adpdJ[i1] += p_colstart;
1089 }
1090     /* Merge j-arrays of A_diag * P_loc_off, A_diag * P_loc_diag, and A_off * P_oth */
1091 Merge3SortedArrays(adponz, adpoJ, adpdnz, adpdJ, aopnz, aopJ, &apnz, apJ);
1092     ierr = MatPreallocateSet(i+rstart, apnz, apJ, dnz, onz);CHKERRQ(ierr);
1093
1094 aopJ += aopnz;
1095 adpoJ += adponz;
1096 adpdJ += adpdnz;
1097 apJ += apnz;
1098 api[i+1] = api[i] + apnz;
1099 }
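
MatPreallocateSet, expanded inline at line 1092, only counts: each merged column index is classified as diagonal-block (inside this process's owned column range) or off-diagonal-block, filling the dnz/onz arrays that MatMPIAIJSetPreallocation consumes at line 1111. A small sketch of that counting rule for a single row, with hypothetical names and toy data:

#include <stdio.h>

/* classify one row's column indices against this rank's owned column range [cstart,cend) */
static void preallocate_set(const int *cols,int ncols,int cstart,int cend,int *dnz,int *onz)
{
  int k;
  *dnz = *onz = 0;
  for (k=0; k<ncols; k++) {
    if (cols[k] >= cstart && cols[k] < cend) (*dnz)++; /* diagonal block */
    else                                     (*onz)++; /* off-diagonal block */
  }
}

int main(void)
{
  const int apJ[5] = {0,3,5,9,12};        /* merged, sorted column indices of one row of C */
  int dnz,onz;
  preallocate_set(apJ,5,4,10,&dnz,&onz);  /* this rank owns columns [4,10) */
  printf("dnz = %d, onz = %d\n",dnz,onz); /* dnz = 2 (5,9), onz = 3 (0,3,12) */
  return 0;
}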
1100
1101 /* malloc apa to store dense row A[i,:]*P */
1102   ierr = PetscCalloc1(pN+2,&apa);CHKERRQ(ierr);
1103
1104 ptap->apa = apa;
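
apa is the dense scratch row used by the numeric phase that pairs with this symbolic routine: each sparse row of A scatters scaled rows of P into apa, and the precomputed pattern apj then gathers the finished row back out and zeroes apa for reuse. A self-contained sketch of that scatter/gather kernel on toy CSR data (hypothetical names, not the PETSc implementation):

#include <stdio.h>

#define PN 6 /* number of columns of P */

int main(void)
{
  /* one CSR row of A: A[i,0] = 2.0, A[i,2] = -1.0 */
  const int    ai[2] = {0,2},aj[2] = {0,2};
  const double aa[2] = {2.0,-1.0};
  /* CSR of P (3 rows, PN columns) */
  const int    pi[4] = {0,2,3,5},pj[5] = {1,4,2,1,5};
  const double pa[5] = {1.0,3.0,5.0,7.0,9.0};
  /* symbolic pattern of row i of C = A*P, as a routine like the one above would produce */
  const int    apj[3] = {1,4,5};
  double       apa[PN] = {0.0}; /* dense scratch row, the role played by ptap->apa */
  int          k,j;

  for (k=ai[0]; k<ai[1]; k++) {            /* scatter: apa += A[i,row] * P[row,:] */
    for (j=pi[aj[k]]; j<pi[aj[k]+1]; j++) apa[pj[j]] += aa[k]*pa[j];
  }
  for (k=0; k<3; k++) {                    /* gather the finished row, reset apa for reuse */
    printf("C[i,%d] = %g\n",apj[k],apa[apj[k]]);
    apa[apj[k]] = 0.0;
  }
  return 0;
}

Because apa is dense, each column update costs O(1), at the price of O(pN) memory per process, which is what makes this variant memory-nonscalable.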
1105 /* create and assemble symbolic parallel matrix Cmpi */
1106   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
1107   ierr = MatSetSizes(Cmpi,am,pn,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1108   ierr = MatSetBlockSizesFromMats(Cmpi,A,P);CHKERRQ(ierr);
1109   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1110   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1111   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1112
1113
1114   ierr = MatSetValues_MPIAIJ_CopyFromCSRFormat_Symbolic(Cmpi, apj, api);CHKERRQ(ierr);
1115   ierr = MatAssemblyBegin(Cmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1116   ierr = MatAssemblyEnd(Cmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1117   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
1118
1119
1120 ptap->destroy = Cmpi->ops->destroy;
1121 ptap->duplicate = Cmpi->ops->duplicate;
1122 Cmpi->ops->matmultnumeric = MatMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable;
1123 Cmpi->ops->destroy = MatDestroy_MPIAIJ_MatMatMult;
1124
1125 /* attach the supporting struct to Cmpi for reuse */
1126 c = (Mat_MPIAIJ*)Cmpi->data;
1127 c->ap = ptap;
1128 *C = Cmpi;
1129
1130 /* set MatInfo */
1131 afill = (PetscReal)api[am]/(adi[am]+aoi[am]+pi_loc[pm]+1) + 1.e-5;
1132 if (afill < 1.0) afill = 1.0;
1133 Cmpi->info.mallocs = nspacedouble;
1134 Cmpi->info.fill_ratio_given = fill;
1135 Cmpi->info.fill_ratio_needed = afill;
1136
1137#if defined(PETSC_USE_INFO)
1138 if (api[am]) {
1139     ierr = PetscInfo3(Cmpi,"Reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)afill);CHKERRQ(ierr);
1140     ierr = PetscInfo1(Cmpi,"Use MatMatMult(A,B,MatReuse,%g,&C) for best performance.\n",(double)afill);CHKERRQ(ierr);
1141 } else {
1142     ierr = PetscInfo(Cmpi,"Empty matrix product\n");CHKERRQ(ierr);
1143 }
1144#endif
1145
1146   ierr = MatDestroy(&aopoth);CHKERRQ(ierr);
1147   ierr = MatDestroy(&adpd);CHKERRQ(ierr);
1148   ierr = PetscFree(j_temp);CHKERRQ(ierr);
1149   ierr = PetscFree(adpoj);CHKERRQ(ierr);
1150   ierr = PetscFree(adpoi);CHKERRQ(ierr);
1151   PetscFunctionReturn(0);
1152}
1153
1154
1155/*-------------------------------------------------------------------------*/
1156PetscErrorCode MatTransposeMatMult_MPIAIJ_MPIAIJ(Mat P,Mat A,MatReuse scall,PetscReal fill,Mat *C)
1157{
1158 PetscErrorCode ierr;
1159 const char *algTypes[3] = {"scalable","nonscalable","matmatmult"};
1160 PetscInt aN=A->cmap->N,alg=1; /* set default algorithm */
1161 PetscBool flg;
1162
1163   PetscFunctionBegin;
1164 if (scall == MAT_INITIAL_MATRIX) {
1165     ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatTransposeMatMult","Mat");CHKERRQ(ierr);
1166     ierr = PetscOptionsEList("-mattransposematmult_via","Algorithmic approach","MatTransposeMatMult",algTypes,3,algTypes[1],&alg,&flg);CHKERRQ(ierr);
1167     ierr = PetscOptionsEnd();CHKERRQ(ierr);
1168
1169     ierr = PetscLogEventBegin(MAT_TransposeMatMultSymbolic,P,A,0,0);CHKERRQ(ierr);
1170 switch (alg) {
1171 case 1:
1172 if (!flg && aN > 100000) { /* may switch to scalable algorithm as default */
1173 MatInfo Ainfo,Pinfo;
1174 PetscInt nz_local;
1175 PetscBool alg_scalable_loc=PETSC_FALSE,alg_scalable;
1176 MPI_Comm comm;
1177
1178         ierr = MatGetInfo(A,MAT_LOCAL,&Ainfo);CHKERRQ(ierr);
1179         ierr = MatGetInfo(P,MAT_LOCAL,&Pinfo);CHKERRQ(ierr);
1180 nz_local = (PetscInt)(Ainfo.nz_allocated + Pinfo.nz_allocated); /* estimated local nonzero entries */
1181
1182 if (aN > fill*nz_local) alg_scalable_loc = PETSC_TRUE;
1183         ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1184         ierr = MPIU_Allreduce(&alg_scalable_loc,&alg_scalable,1,MPIU_BOOL,MPI_LOR,comm);CHKERRQ(ierr);
1185
1186 if (alg_scalable) {
1187           alg  = 0; /* switch to the scalable algorithm: its workspace does not grow with the global column count aN */
1188           ierr = MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ(P,A,fill,C);CHKERRQ(ierr);
1189 break;
1190 }
1191 }
1192       ierr = MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(P,A,fill,C);CHKERRQ(ierr);
1193 break;
1194 case 2:
1195 {
1196 Mat Pt;
1197 Mat_APMPI *ptap;
1198 Mat_MPIAIJ *c;
1199         ierr = MatTranspose(P,MAT_INITIAL_MATRIX,&Pt);CHKERRQ(ierr);
1200         ierr = MatMatMult(Pt,A,MAT_INITIAL_MATRIX,fill,C);CHKERRQ(ierr);
1201 c = (Mat_MPIAIJ*)(*C)->data;
1202 ptap = c->ap;
1203 if (ptap) {
1204 ptap->Pt = Pt;
1205 (*C)->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
1206 }
1207 (*C)->ops->mattransposemultnumeric = MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_matmatmult;
1208         PetscFunctionReturn(0);
1209 }
1210 break;
1211 default: /* scalable algorithm */
1212       ierr = MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ(P,A,fill,C);CHKERRQ(ierr);
1213 break;
1214 }
1215     ierr = PetscLogEventEnd(MAT_TransposeMatMultSymbolic,P,A,0,0);CHKERRQ(ierr);
1216
1217 {
1218 Mat_MPIAIJ *c = (Mat_MPIAIJ*)(*C)->data;
1219 Mat_APMPI *ap = c->ap;
1220       ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)(*C)),((PetscObject)(*C))->prefix,"MatFreeIntermediateDataStructures","Mat");CHKERRQ(ierr);
1221 ap->freestruct = PETSC_FALSE;
1222       ierr = PetscOptionsBool("-mat_freeintermediatedatastructures","Free intermediate data structures","MatFreeIntermediateDataStructures",ap->freestruct,&ap->freestruct,NULL);CHKERRQ(ierr);
1223       ierr = PetscOptionsEnd();CHKERRQ(ierr);
1224 }
1225 }
1226
1227   ierr = PetscLogEventBegin(MAT_TransposeMatMultNumeric,P,A,0,0);CHKERRQ(ierr);
1228   ierr = (*(*C)->ops->mattransposemultnumeric)(P,A,*C);CHKERRQ(ierr);
1229   ierr = PetscLogEventEnd(MAT_TransposeMatMultNumeric,P,A,0,0);CHKERRQ(ierr);
1230   PetscFunctionReturn(0);
1231}
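
The MAT_INITIAL_MATRIX branch above selects the algorithm with a simple collective heuristic: each rank votes for the scalable variant when the global column count aN exceeds fill times its local nonzero estimate, and an allreduce with MPI_LOR makes the switch uniform across ranks (the symbolic routines communicate, so all ranks must take the same branch). A sketch of just the decision logic, with the reduction modeled as an OR over ranks and all names hypothetical:

#include <stdio.h>

/* local vote: is the dense O(aN) workspace of the nonscalable algorithm too large here? */
static int vote_scalable(long aN,double fill,long nz_local)
{
  return aN > fill*nz_local;
}

int main(void)
{
  long   aN          = 200000;                /* global number of columns */
  double fill        = 2.0;
  long   nz_local[3] = {150000,60000,180000}; /* per-rank nnz(A)+nnz(P) estimates */
  int    r,alg_scalable = 0;

  /* stands in for MPIU_Allreduce(...,MPIU_BOOL,MPI_LOR,comm) at line 1184 */
  for (r=0; r<3; r++) alg_scalable |= vote_scalable(aN,fill,nz_local[r]);

  printf("all ranks use the %s algorithm\n",alg_scalable ? "scalable" : "nonscalable");
  return 0;
}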
1232
1233/* This routine only works when scall=MAT_REUSE_MATRIX! */
1234PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_matmatmult(Mat P,Mat A,Mat C)
1235{
1236 PetscErrorCode ierr;
1237 Mat_MPIAIJ *c=(Mat_MPIAIJ*)C->data;
1238 Mat_APMPI *ptap= c->ap;
1239 Mat Pt;
1240
1241   PetscFunctionBegin;
1242 if (!ptap) {
1243 MPI_Comm comm;
1244     ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr);
1245     SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtA cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'");
1246 }
1247
1248   Pt   = ptap->Pt;
1249   ierr = MatTranspose(P,MAT_REUSE_MATRIX,&Pt);CHKERRQ(ierr);
1250   ierr = MatMatMultNumeric(Pt,A,C);CHKERRQ(ierr);
1251
1252   /* the supporting struct ptap consumes almost the same amount of memory as C=Pt*A; release it if C will not be updated again with A and P */
1253 if (ptap->freestruct) {
1254     ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr);
1255 }
1256   PetscFunctionReturn(0);
1257}
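
The 'matmatmult' variant above needs no dedicated kernel: it forms Pt = P^T once with MAT_INITIAL_MATRIX, stashes it in ptap->Pt, and computes C = Pt*A with the ordinary product; each reuse call then repeats both steps with MAT_REUSE_MATRIX into the existing storage. A dense toy version of the same two-step plan (hypothetical helpers, not PETSc code):

#include <stdio.h>

#define M 2 /* rows of P and A */
#define K 3 /* columns of P, rows of C */
#define N 2 /* columns of A and C */

static void transpose(double P[M][K],double Pt[K][M])
{
  int i,j;
  for (i=0; i<M; i++) for (j=0; j<K; j++) Pt[j][i] = P[i][j];
}

static void matmult(double Pt[K][M],double A[M][N],double C[K][N])
{
  int i,j,l;
  for (i=0; i<K; i++) {
    for (j=0; j<N; j++) {
      C[i][j] = 0.0;
      for (l=0; l<M; l++) C[i][j] += Pt[i][l]*A[l][j];
    }
  }
}

int main(void)
{
  double P[M][K] = {{1,0,2},{0,3,0}};
  double A[M][N] = {{4,1},{0,2}};
  double Pt[K][M],C[K][N];
  int    i;

  transpose(P,Pt);  /* the MAT_INITIAL_MATRIX step: build and keep Pt */
  matmult(Pt,A,C);  /* a reuse call would redo both steps into the same storage */

  for (i=0; i<K; i++) printf("%g %g\n",C[i][0],C[i][1]);
  return 0;
}

The trade-off is one extra matrix (Pt) kept alive between calls, which is why ptap->Pt is stored and why -mat_freeintermediatedatastructures exists.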
1258
1259/* This routine is modified from MatPtAPSymbolic_MPIAIJ_MPIAIJ() */
1260PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat P,Mat A,PetscReal fill,Mat *C)
1261{
1262 PetscErrorCode ierr;
1263 Mat_APMPI *ptap;
1264 Mat_MPIAIJ *p=(Mat_MPIAIJ*)P->data,*c;
1265 MPI_Comm comm;
1266 PetscMPIInt size,rank;
1267 Mat Cmpi;
1268   PetscFreeSpaceList free_space=NULL,current_space=NULL;
1269 PetscInt pn=P->cmap->n,aN=A->cmap->N,an=A->cmap->n;
1270 PetscInt *lnk,i,k,nsend;
1271 PetscBT lnkbt;
1272 PetscMPIInt tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
1273 PetscInt **buf_rj,**buf_ri,**buf_ri_k;
1274 PetscInt len,proc,*dnz,*onz,*owners,nzi;
1275 PetscInt nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
1276 MPI_Request *swaits,*rwaits;
1277 MPI_Status *sstatus,rstatus;
1278 PetscLayout rowmap;
1279   PetscInt *owners_co,*coi,*coj; /* i and j arrays of (p->B)^T*A_loc - used in the communication */
1280   PetscMPIInt *len_r,*id_r; /* lengths and source ranks of the messages to be received */
1281 PetscInt *Jptr,*prmap=p->garray,con,j,Crmax;
1282 Mat_SeqAIJ *a_loc,*c_loc,*c_oth;
1283 PetscTable ta;
1284 MatType mtype;
1285 const char *prefix;
1286
1287   PetscFunctionBegin;
1288   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1289   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1290   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1291
1292 /* create symbolic parallel matrix Cmpi */
1293   ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
1294   ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
1295   ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
1296
1297 Cmpi->ops->mattransposemultnumeric = MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable;
1298
1299   /* create struct Mat_APMPI and attach it to C later */
1300   ierr = PetscNew(&ptap);CHKERRQ(ierr);
1301 ptap->reuse = MAT_INITIAL_MATRIX;
1302
1303 /* (0) compute Rd = Pd^T, Ro = Po^T */
1304 /* --------------------------------- */
1305   ierr = MatTranspose_SeqAIJ(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);CHKERRQ(ierr);
1306   ierr = MatTranspose_SeqAIJ(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);CHKERRQ(ierr);
1307
1308 /* (1) compute symbolic A_loc */
1309 /* ---------------------------*/
1310   ierr = MatMPIAIJGetLocalMat(A,MAT_INITIAL_MATRIX,&ptap->A_loc);CHKERRQ(ierr);
1311
1312 /* (2-1) compute symbolic C_oth = Ro*A_loc */
1313 /* ------------------------------------ */
1314   ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
1315   ierr = MatSetOptionsPrefix(ptap->Ro,prefix);CHKERRQ(ierr);
1316   ierr = MatAppendOptionsPrefix(ptap->Ro,"inner_offdiag_");CHKERRQ(ierr);
1317   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Ro,ptap->A_loc,fill,&ptap->C_oth);CHKERRQ(ierr);
1318
1319 /* (3) send coj of C_oth to other processors */
1320 /* ------------------------------------------ */
1321 /* determine row ownership */
1322   ierr = PetscLayoutCreate(comm,&rowmap);CHKERRQ(ierr);
1323   rowmap->n  = pn;
1324   rowmap->bs = 1;
1325   ierr = PetscLayoutSetUp(rowmap);CHKERRQ(ierr);
1326 owners = rowmap->range;
1327
1328 /* determine the number of messages to send, their lengths */
1329   ierr = PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);CHKERRQ(ierr);
1330   ierr = PetscMemzero(len_s,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
1331   ierr = PetscMemzero(len_si,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
1332
1333 c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
1334 coi = c_oth->i; coj = c_oth->j;
1335 con = ptap->C_oth->rmap->n;
1336 proc = 0;
1337 for (i=0; i<con; i++) {
1338 while (prmap[i] >= owners[proc+1]) proc++;
1339 len_si[proc]++; /* num of rows in Co(=Pt*A) to be sent to [proc] */
1340 len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
1341 }
1342
1343   len = 0; /* total length of buf_s[], see (4) */
1344 owners_co[0] = 0;
1345 nsend = 0;
1346 for (proc=0; proc<size; proc++) {
1347 owners_co[proc+1] = owners_co[proc] + len_si[proc];
1348 if (len_s[proc]) {
1349 nsend++;
1350 len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
1351 len += len_si[proc];
1352 }
1353 }
1354
1355 /* determine the number and length of messages to receive for coi and coj */
1356   ierr = PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);CHKERRQ(ierr);
1357   ierr = PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);CHKERRQ(ierr);
1358
1359 /* post the Irecv and Isend of coj */
1360   ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
1361   ierr = PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);CHKERRQ(ierr);
1362   ierr = PetscMalloc1(nsend+1,&swaits);CHKERRQ(ierr);
1363 for (proc=0, k=0; proc<size; proc++) {
1364 if (!len_s[proc]) continue;
1365 i = owners_co[proc];
1366     ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);CHKERRQ(ierr);
1367 k++;
1368 }
1369
1370 /* (2-2) compute symbolic C_loc = Rd*A_loc */
1371 /* ---------------------------------------- */
1372   ierr = MatSetOptionsPrefix(ptap->Rd,prefix);CHKERRQ(ierr);
1373   ierr = MatAppendOptionsPrefix(ptap->Rd,"inner_diag_");CHKERRQ(ierr);
1374   ierr = MatMatMultSymbolic_SeqAIJ_SeqAIJ(ptap->Rd,ptap->A_loc,fill,&ptap->C_loc);CHKERRQ(ierr);
1375 c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;
1376
1377   /* wait until the receives of coj are complete */
1378 for (i=0; i<nrecv; i++) {
1379     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
1380   }
1381   ierr = PetscFree(rwaits);CHKERRQ(ierr);
1382   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
1383
1384 /* add received column indices into ta to update Crmax */
1385 a_loc = (Mat_SeqAIJ*)(ptap->A_loc)->data;
1386
1387 /* create and initialize a linked list */
1388   ierr = PetscTableCreate(an,aN,&ta);CHKERRQ(ierr); /* for computing Crmax */
1389   MatRowMergeMax_SeqAIJ(a_loc,ptap->A_loc->rmap->N,ta);
1390
1391 for (k=0; k<nrecv; k++) {/* k-th received message */
1392 Jptr = buf_rj[k];
1393 for (j=0; j<len_r[k]; j++) {
1394       ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr);
1395 }
1396 }
1397   ierr = PetscTableGetCount(ta,&Crmax);CHKERRQ(ierr);
1398   ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);
1399
1400 /* (4) send and recv coi */
1401 /*-----------------------*/
1402   ierr = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
1403   ierr = PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr);
1404   ierr = PetscMalloc1(len+1,&buf_s);CHKERRQ(ierr);
1405 buf_si = buf_s; /* points to the beginning of k-th msg to be sent */
1406 for (proc=0,k=0; proc<size; proc++) {
1407 if (!len_s[proc]) continue;
1408 /* form outgoing message for i-structure:
1409 buf_si[0]: nrows to be sent
1410 [1:nrows]: row index (global)
1411 [nrows+1:2*nrows+1]: i-structure index
1412 */
1413 /*-------------------------------------------*/
1414 nrows = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
1415 buf_si_i = buf_si + nrows+1;
1416 buf_si[0] = nrows;
1417 buf_si_i[0] = 0;
1418 nrows = 0;
1419 for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
1420 nzi = coi[i+1] - coi[i];
1421 buf_si_i[nrows+1] = buf_si_i[nrows] + nzi; /* i-structure */
1422       buf_si[nrows+1] = prmap[i] - owners[proc]; /* local row index */
1423 nrows++;
1424 }
1425     ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);CHKERRQ(ierr);
1426 k++;
1427 buf_si += len_si[proc];
1428 }
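
Each message sent in the loop above carries the i-structure in the flat layout documented at lines 1408-1412: one row count, then nrows local row indices, then nrows+1 CSR-style offsets, for a total of 2*(nrows+1) integers, which matches the length set at line 1350. A stand-alone pack/unpack sketch of that layout (hypothetical names):

#include <stdio.h>

/* pack: buf[0] = nrows, buf[1..nrows] = local row indices, buf[nrows+1..2*nrows+1] = offsets */
static int pack_istructure(int nrows,const int *rows,const int *offs,int *buf)
{
  int k;
  buf[0] = nrows;
  for (k=0; k<nrows;  k++) buf[1+k]       = rows[k];
  for (k=0; k<=nrows; k++) buf[1+nrows+k] = offs[k];
  return 2*(nrows+1); /* message length, matching len_si[proc] = 2*(len_si[proc]+1) */
}

int main(void)
{
  const int rows[2] = {2,5};   /* row indices in the receiver's local numbering */
  const int offs[3] = {0,3,7}; /* i-structure: first row has 3 nonzeros, second has 4 */
  int       buf[16],len,k;

  len = pack_istructure(2,rows,offs,buf);
  printf("message length = %d, nrows = %d\n",len,buf[0]);

  /* unpack on the receiving side, as the merge loop does through nextrow/nextci */
  {
    const int *rrows = buf + 1,*roffs = buf + 1 + buf[0];
    for (k=0; k<buf[0]; k++) printf("row %d: nnz = %d\n",rrows[k],roffs[k+1]-roffs[k]);
  }
  return 0;
}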
1429 for (i=0; i<nrecv; i++) {
1430     ierr = MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);CHKERRQ(ierr);
1431   }
1432   ierr = PetscFree(rwaits);CHKERRQ(ierr);
1433   if (nsend) {ierr = MPI_Waitall(nsend,swaits,sstatus);CHKERRQ(ierr);}
1434
1435   ierr = PetscFree4(len_s,len_si,sstatus,owners_co);CHKERRQ(ierr);
1436   ierr = PetscFree(len_ri);CHKERRQ(ierr);
1437   ierr = PetscFree(swaits);CHKERRQ(ierr);
1438   ierr = PetscFree(buf_s);CHKERRQ(ierr);
1439
1440 /* (5) compute the local portion of Cmpi */
1441 /* ------------------------------------------ */
1442   /* set initial free space to be Crmax, sufficient for holding the nonzeros in each row of Cmpi */
1443   ierr = PetscFreeSpaceGet(Crmax,&free_space);CHKERRQ(ierr);
1444 current_space = free_space;
1445
1446   ierr = PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);CHKERRQ(ierr);
1447 for (k=0; k<nrecv; k++) {
1448     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th received i-structure */
1449     nrows       = *buf_ri_k[k];
1450     nextrow[k]  = buf_ri_k[k] + 1; /* next row number of k-th received i-structure */
1451     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of k-th received i-structure */
1452 }
1453
1454   ierr = MatPreallocateInitialize(comm,pn,an,dnz,onz);CHKERRQ(ierr);
1455   ierr = PetscLLCondensedCreate(Crmax,aN,&lnk,&lnkbt);CHKERRQ(ierr);
1456 for (i=0; i<pn; i++) {
1457 /* add C_loc into Cmpi */
1458 nzi = c_loc->i[i+1] - c_loc->i[i];
1459 Jptr = c_loc->j + c_loc->i[i];
1460     ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1461
1462 /* add received col data into lnk */
1463 for (k=0; k<nrecv; k++) { /* k-th received message */
1464 if (i == *nextrow[k]) { /* i-th row */
1465 nzi = *(nextci[k]+1) - *nextci[k];
1466 Jptr = buf_rj[k] + *nextci[k];
1467         ierr = PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);CHKERRQ(ierr);
1468 nextrow[k]++; nextci[k]++;
1469 }
1470 }
1471 nzi = lnk[0];
1472
1473 /* copy data into free space, then initialize lnk */
1474     ierr = PetscLLCondensedClean(aN,nzi,current_space->array,lnk,lnkbt);CHKERRQ(ierr);
1475     ierr = MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);CHKERRQ(ierr);
1476 }
1477   ierr = PetscFree3(buf_ri_k,nextrow,nextci);CHKERRQ(ierr);
1478   ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);
1479   ierr = PetscFreeSpaceDestroy(free_space);CHKERRQ(ierr);
1480
1481 /* local sizes and preallocation */
1482   ierr = MatSetSizes(Cmpi,pn,an,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1483   if (P->cmap->bs > 0) {ierr = PetscLayoutSetBlockSize(Cmpi->rmap,P->cmap->bs);CHKERRQ(ierr);}
1484   if (A->cmap->bs > 0) {ierr = PetscLayoutSetBlockSize(Cmpi->cmap,A->cmap->bs);CHKERRQ(ierr);}
1485   ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
1486   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
1487
1488   /* free the work arrays used in the merge */
1489   ierr = PetscFree(id_r);CHKERRQ(ierr);
1490   ierr = PetscFree(len_r);CHKERRQ(ierr);
1491   ierr = PetscFree(buf_ri[0]);CHKERRQ(ierr);
1492   ierr = PetscFree(buf_ri);CHKERRQ(ierr);
1493   ierr = PetscFree(buf_rj[0]);CHKERRQ(ierr);
1494   ierr = PetscFree(buf_rj);CHKERRQ(ierr);
1495   ierr = PetscLayoutDestroy(&rowmap);CHKERRQ(ierr);
1496
1497 /* attach the supporting struct to Cmpi for reuse */
1498 c = (Mat_MPIAIJ*)Cmpi->data;
1499 c->ap = ptap;
1500 ptap->destroy = Cmpi->ops->destroy;
1501
1502  /* Cmpi is not ready for use - assembly will be done by MatTransposeMatMultNumeric() */
1503 Cmpi->assembled = PETSC_FALSE;
1504 Cmpi->ops->destroy = MatDestroy_MPIAIJ_PtAP;
1505 Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
1506
1507 *C = Cmpi;
1508 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1509}
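/* [Editor's note] The symbolic routine ending above interposes on
   destruction: the original destroy callback is stashed in ptap->destroy
   and MatDestroy_MPIAIJ_PtAP is installed in its place, so the supporting
   Mat_APMPI struct is released together with Cmpi. */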
1510
1511PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat P,Mat A,Mat C)
1512{
1513 PetscErrorCode ierr;
1514 Mat_MPIAIJ *p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
1515 Mat_SeqAIJ *c_seq;
1516 Mat_APMPI *ptap = c->ap;
1517 Mat A_loc,C_loc,C_oth;
1518 PetscInt i,rstart,rend,cm,ncols,row;
1519 const PetscInt *cols;
1520 const PetscScalar *vals;
1521
1522 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
; petscstack->line[petscstack->currentsize] = 1522; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1523 if (!ptap) {
1524 MPI_Comm comm;
1525 ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1525,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1526 SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtA cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'")return PetscError(comm,1526,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,73,PETSC_ERROR_INITIAL,"PtA cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'"
)
;
1527 }
1528
1529 ierr = MatZeroEntries(C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1529,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1530
1531 if (ptap->reuse == MAT_REUSE_MATRIX) {
1532 /* These matrices are obtained in MatTransposeMatMultSymbolic() */
1533 /* 1) get R = Pd^T, Ro = Po^T */
1534 /*----------------------------*/
1535 ierr = MatTranspose_SeqAIJ(p->A,MAT_REUSE_MATRIX,&ptap->Rd);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1535,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1536 ierr = MatTranspose_SeqAIJ(p->B,MAT_REUSE_MATRIX,&ptap->Ro);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1536,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1537
1538 /* 2) compute numeric A_loc */
1539 /*--------------------------*/
1540 ierr = MatMPIAIJGetLocalMat(A,MAT_REUSE_MATRIX,&ptap->A_loc);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1540,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1541 }
1542
1543 /* 3) C_loc = Rd*A_loc, C_oth = Ro*A_loc */
1544 A_loc = ptap->A_loc;
1545 ierr = ((ptap->C_loc)->ops->matmultnumeric)(ptap->Rd,A_loc,ptap->C_loc);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1545,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1546 ierr = ((ptap->C_oth)->ops->matmultnumeric)(ptap->Ro,A_loc,ptap->C_oth);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1546,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1547 C_loc = ptap->C_loc;
1548 C_oth = ptap->C_oth;
1549
1551  /* add C_loc and C_oth to C */
1551 ierr = MatGetOwnershipRange(C,&rstart,&rend);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1551,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1552
1553 /* C_loc -> C */
1554 cm = C_loc->rmap->N;
1555 c_seq = (Mat_SeqAIJ*)C_loc->data;
1556 cols = c_seq->j;
1557 vals = c_seq->a;
1558 for (i=0; i<cm; i++) {
1559 ncols = c_seq->i[i+1] - c_seq->i[i];
1560 row = rstart + i;
1561 ierr = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1561,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1562 cols += ncols; vals += ncols;
1563 }
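/* [Editor's sketch -- not part of the PETSc source] The copy loop above and
   the one below walk a SeqAIJ matrix in standard CSR fashion: row r owns the
   slices j[i[r]..i[r+1]-1] and a[i[r]..i[r+1]-1]. The only difference between
   the two loops is the row mapping: C_loc rows map to rstart+i, while C_oth
   rows map through p->garray[] to their global owners. A minimal standalone
   restatement, with a hypothetical Csr struct in place of Mat_SeqAIJ: */
typedef struct { int m; const int *i, *j; const double *a; } Csr;

static void CsrRowWalk(const Csr *M, const int *rowmap,
                       void (*visit)(int grow, int ncols, const int *cols, const double *vals))
{
  const int    *cols = M->j;
  const double *vals = M->a;
  for (int r = 0; r < M->m; r++) {
    int ncols = M->i[r+1] - M->i[r];     /* number of entries in local row r */
    visit(rowmap[r], ncols, cols, vals); /* rowmap[] plays the role of rstart+i or garray[] */
    cols += ncols; vals += ncols;        /* advance to the next row's slice */
  }
}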
1564
1565 /* Co -> C, off-processor part */
1566 cm = C_oth->rmap->N;
1567 c_seq = (Mat_SeqAIJ*)C_oth->data;
1568 cols = c_seq->j;
1569 vals = c_seq->a;
1570 for (i=0; i<cm; i++) {
1571 ncols = c_seq->i[i+1] - c_seq->i[i];
1572 row = p->garray[i];
1573 ierr = MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1573,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1574 cols += ncols; vals += ncols;
1575 }
1576 ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1576,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1577 ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1577,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1578
1579 ptap->reuse = MAT_REUSE_MATRIX;
1580
1581  /* the supporting struct ptap consumes almost the same amount of memory as C=PtAP; release it if C will not be updated by A and P */
1582 if (ptap->freestruct) {
1583 ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1583,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1584 }
1585 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1586}
1587
1588PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ(Mat P,Mat A,Mat C)
1589{
1590 PetscErrorCode ierr;
1591 Mat_Merge_SeqsToMPI *merge;
1592 Mat_MPIAIJ *p =(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
1593 Mat_SeqAIJ *pd=(Mat_SeqAIJ*)(p->A)->data,*po=(Mat_SeqAIJ*)(p->B)->data;
1594 Mat_APMPI *ptap;
1595 PetscInt *adj;
1596 PetscInt i,j,k,anz,pnz,row,*cj,nexta;
1597 MatScalar *ada,*ca,valtmp;
1598 PetscInt am =A->rmap->n,cm=C->rmap->n,pon=(p->B)->cmap->n;
1599 MPI_Comm comm;
1600 PetscMPIInt size,rank,taga,*len_s;
1601 PetscInt *owners,proc,nrows,**buf_ri_k,**nextrow,**nextci;
1602 PetscInt **buf_ri,**buf_rj;
1603 PetscInt cnz=0,*bj_i,*bi,*bj,bnz,nextcj; /* bi,bj,ba: local array of C(mpi mat) */
1604 MPI_Request *s_waits,*r_waits;
1605 MPI_Status *status;
1606 MatScalar **abuf_r,*ba_i,*pA,*coa,*ba;
1607 PetscInt *ai,*aj,*coi,*coj,*poJ,*pdJ;
1608 Mat A_loc;
1609 Mat_SeqAIJ *a_loc;
1610
1611 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
; petscstack->line[petscstack->currentsize] = 1611; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1612 ierr = PetscObjectGetComm((PetscObject)C,&comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1612,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1613 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1613,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1614 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1614,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1615
1616 ptap = c->ap;
1617 if (!ptap) SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE,"PtA cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'")return PetscError(comm,1617,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,73,PETSC_ERROR_INITIAL,"PtA cannot be reused. Do not call MatFreeIntermediateDataStructures() or use '-mat_freeintermediatedatastructures'"
)
;
1618 merge = ptap->merge;
1619
1620 /* 2) compute numeric C_seq = P_loc^T*A_loc */
1621 /*------------------------------------------*/
1622 /* get data from symbolic products */
1623 coi = merge->coi; coj = merge->coj;
1624 ierr = PetscCalloc1(coi[pon]+1,&coa)PetscMallocA(1,PETSC_TRUE,1624,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(coi[pon]+1)*sizeof(**(&coa)),(&coa))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1624,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1625 bi = merge->bi; bj = merge->bj;
1626 owners = merge->rowmap->range;
1627 ierr = PetscCalloc1(bi[cm]+1,&ba)PetscMallocA(1,PETSC_TRUE,1627,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(bi[cm]+1)*sizeof(**(&ba)),(&ba))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1627,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1628
1629 /* get A_loc by taking all local rows of A */
1630 A_loc = ptap->A_loc;
1631 ierr = MatMPIAIJGetLocalMat(A,MAT_REUSE_MATRIX,&A_loc);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1631,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1632 a_loc = (Mat_SeqAIJ*)(A_loc)->data;
1633 ai = a_loc->i;
1634 aj = a_loc->j;
1635
1636 for (i=0; i<am; i++) {
1637 anz = ai[i+1] - ai[i];
1638 adj = aj + ai[i];
1639 ada = a_loc->a + ai[i];
1640
1641 /* 2-b) Compute Cseq = P_loc[i,:]^T*A[i,:] using outer product */
1642 /*-------------------------------------------------------------*/
1643 /* put the value into Co=(p->B)^T*A (off-diagonal part, send to others) */
1644 pnz = po->i[i+1] - po->i[i];
1645 poJ = po->j + po->i[i];
1646 pA = po->a + po->i[i];
1647 for (j=0; j<pnz; j++) {
1648 row = poJ[j];
1649 cj = coj + coi[row];
1650 ca = coa + coi[row];
1651 /* perform sparse axpy */
1652 nexta = 0;
1653 valtmp = pA[j];
1654 for (k=0; nexta<anz; k++) {
1655 if (cj[k] == adj[nexta]) {
1656 ca[k] += valtmp*ada[nexta];
1657 nexta++;
1658 }
1659 }
1660 ierr = PetscLogFlops(2.0*anz);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1660,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1661 }
1662
1663 /* put the value into Cd (diagonal part) */
1664 pnz = pd->i[i+1] - pd->i[i];
1665 pdJ = pd->j + pd->i[i];
1666 pA = pd->a + pd->i[i];
1667 for (j=0; j<pnz; j++) {
1668 row = pdJ[j];
1669 cj = bj + bi[row];
1670 ca = ba + bi[row];
1671 /* perform sparse axpy */
1672 nexta = 0;
1673 valtmp = pA[j];
1674 for (k=0; nexta<anz; k++) {
1675 if (cj[k] == adj[nexta]) {
1676 ca[k] += valtmp*ada[nexta];
1677 nexta++;
1678 }
1679 }
1680 ierr = PetscLogFlops(2.0*anz);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1680,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1681 }
1682 }
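/* [Editor's sketch -- not part of the PETSc source] The "sparse axpy" inner
   loops above merge two sorted index lists. The symbolic phase guarantees
   that every column adj[] of the A row already appears in the target row's
   column list cj[], so a single linear scan over cj[] places all anz
   contributions and the loop terminates once nexta == anz. Distilled, with
   illustrative names: */
static void SparseAxpy(double alpha,
                       int anz, const int *adj, const double *ada, /* sorted A row */
                       const int *cj, double *ca)                  /* sorted C row; adj is a subset of cj */
{
  int nexta = 0;
  for (int k = 0; nexta < anz; k++) { /* scan the C row */
    if (cj[k] == adj[nexta]) {        /* matching column found */
      ca[k] += alpha*ada[nexta];      /* accumulate alpha*A[i,adj[nexta]] */
      nexta++;
    }
  }
}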
1683
1684 /* 3) send and recv matrix values coa */
1685 /*------------------------------------*/
1686 buf_ri = merge->buf_ri;
1687 buf_rj = merge->buf_rj;
1688 len_s = merge->len_s;
1689 ierr = PetscCommGetNewTag(comm,&taga);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1689,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1690 ierr = PetscPostIrecvScalar(comm,taga,merge->nrecv,merge->id_r,merge->len_r,&abuf_r,&r_waits);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1690,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1691
1692 ierr = PetscMalloc2(merge->nsend+1,&s_waits,size,&status)PetscMallocA(2,PETSC_FALSE,1692,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(merge->nsend+1)*sizeof(**(&s_waits)),(&s_waits
),(size_t)(size)*sizeof(**(&status)),(&status))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1692,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1693 for (proc=0,k=0; proc<size; proc++) {
1694 if (!len_s[proc]) continue;
1695 i = merge->owners_co[proc];
1696 ierr = MPI_Isend(coa+coi[i],len_s[proc],MPIU_MATSCALAR,proc,taga,comm,s_waits+k)((petsc_isend_ct++,0) || PetscMPITypeSize(&(petsc_isend_len
),(len_s[proc]),(((MPI_Datatype)0x4c00080b))) || MPI_Isend((coa
+coi[i]),(len_s[proc]),(((MPI_Datatype)0x4c00080b)),(proc),(taga
),(comm),(s_waits+k)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1696,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1697 k++;
1698 }
1699 if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,r_waits,status)((petsc_wait_all_ct++,petsc_sum_of_waits_ct += (PetscLogDouble
) (merge->nrecv),0) || MPI_Waitall((merge->nrecv),(r_waits
),(status)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1699,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1700 if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,s_waits,status)((petsc_wait_all_ct++,petsc_sum_of_waits_ct += (PetscLogDouble
) (merge->nsend),0) || MPI_Waitall((merge->nsend),(s_waits
),(status)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1700,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1701
1702 ierr = PetscFree2(s_waits,status)PetscFreeA(2,1702,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,&(s_waits),&(status))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1702,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1703 ierr = PetscFree(r_waits)((*PetscTrFree)((void*)(r_waits),1703,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
) || ((r_waits) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1703,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1704 ierr = PetscFree(coa)((*PetscTrFree)((void*)(coa),1704,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
) || ((coa) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1704,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1705
1706 /* 4) insert local Cseq and received values into Cmpi */
1707 /*----------------------------------------------------*/
1708 ierr = PetscMalloc3(merge->nrecv,&buf_ri_k,merge->nrecv,&nextrow,merge->nrecv,&nextci)PetscMallocA(3,PETSC_FALSE,1708,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(merge->nrecv)*sizeof(**(&buf_ri_k)),(&buf_ri_k
),(size_t)(merge->nrecv)*sizeof(**(&nextrow)),(&nextrow
),(size_t)(merge->nrecv)*sizeof(**(&nextci)),(&nextci
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1708,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1709 for (k=0; k<merge->nrecv; k++) {
1710    buf_ri_k[k] = buf_ri[k]; /* beginning of k-th received i-structure */
1711    nrows = *(buf_ri_k[k]);
1712    nextrow[k] = buf_ri_k[k]+1; /* next row number of k-th received i-structure */
1713    nextci[k] = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of k-th received i-structure */
1714 }
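/* [Editor's sketch -- not part of the PETSc source] Each received
   i-structure message is laid out as
     [ nrows | nrows local row indices | nrows+1 CSR-style offsets ],
   matching the "form outgoing message for i-structure" packing in the
   symbolic routine below; nextrow[k] and nextci[k] are cursors into the two
   trailing sections. A standalone walk of that layout, illustrative names: */
static void WalkIStructure(const int *msg, void (*visit)(int lrow, int start, int end))
{
  int        nrows  = msg[0];
  const int *rowidx = msg + 1;         /* nrows local row indices           */
  const int *ioff   = msg + 1 + nrows; /* nrows+1 offsets into the j/a data */
  for (int r = 0; r < nrows; r++) visit(rowidx[r], ioff[r], ioff[r+1]);
}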
1715
1716 for (i=0; i<cm; i++) {
1717 row = owners[rank] + i; /* global row index of C_seq */
1718 bj_i = bj + bi[i]; /* col indices of the i-th row of C */
1719 ba_i = ba + bi[i];
1720 bnz = bi[i+1] - bi[i];
1721 /* add received vals into ba */
1722 for (k=0; k<merge->nrecv; k++) { /* k-th received message */
1723 /* i-th row */
1724 if (i == *nextrow[k]) {
1725 cnz = *(nextci[k]+1) - *nextci[k];
1726 cj = buf_rj[k] + *(nextci[k]);
1727 ca = abuf_r[k] + *(nextci[k]);
1728 nextcj = 0;
1729 for (j=0; nextcj<cnz; j++) {
1730 if (bj_i[j] == cj[nextcj]) { /* bcol == ccol */
1731 ba_i[j] += ca[nextcj++];
1732 }
1733 }
1734 nextrow[k]++; nextci[k]++;
1735 ierr = PetscLogFlops(2.0*cnz);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1735,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1736 }
1737 }
1738 ierr = MatSetValues(C,1,&row,bnz,bj_i,ba_i,INSERT_VALUES);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1738,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1739 }
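/* [Editor's note] The accumulation loop above is the same guarded sorted
   merge as the "sparse axpy" earlier: the received column list cj[] is a
   subset of the local row's column list bj_i[], so a single pass with the
   nextcj cursor folds every received value into ba_i[]. */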
1740 ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1740,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1741 ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1741,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1742
1743 ierr = PetscFree(ba)((*PetscTrFree)((void*)(ba),1743,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
) || ((ba) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1743,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1744 ierr = PetscFree(abuf_r[0])((*PetscTrFree)((void*)(abuf_r[0]),1744,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
) || ((abuf_r[0]) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1744,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1745 ierr = PetscFree(abuf_r)((*PetscTrFree)((void*)(abuf_r),1745,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
) || ((abuf_r) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1745,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1746 ierr = PetscFree3(buf_ri_k,nextrow,nextci)PetscFreeA(3,1746,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,&(buf_ri_k),&(nextrow),&(nextci))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1746,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1747
1748 if (ptap->freestruct) {
1749 ierr = MatFreeIntermediateDataStructures(C);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1749,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1750 }
1751 PetscFunctionReturn(0)do { do { ; if (petscstack && petscstack->currentsize
> 0) { petscstack->currentsize--; petscstack->function
[petscstack->currentsize] = 0; petscstack->file[petscstack
->currentsize] = 0; petscstack->line[petscstack->currentsize
] = 0; petscstack->petscroutine[petscstack->currentsize
] = PETSC_FALSE; } if (petscstack) { petscstack->hotdepth =
(((petscstack->hotdepth-1)<(0)) ? (0) : (petscstack->
hotdepth-1)); } ; } while (0); return(0);} while (0)
;
1752}
1753
1754PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ(Mat P,Mat A,PetscReal fill,Mat *C)
1755{
1756 PetscErrorCode ierr;
1757 Mat Cmpi,A_loc,POt,PDt;
1758 Mat_APMPI *ptap;
1759 PetscFreeSpaceList free_space=NULL((void*)0),current_space=NULL((void*)0);
1760 Mat_MPIAIJ *p=(Mat_MPIAIJ*)P->data,*a=(Mat_MPIAIJ*)A->data,*c;
1761 PetscInt *pdti,*pdtj,*poti,*potj,*ptJ;
1762 PetscInt nnz;
1763 PetscInt *lnk,*owners_co,*coi,*coj,i,k,pnz,row;
1764 PetscInt am =A->rmap->n,pn=P->cmap->n;
1765 MPI_Comm comm;
1766 PetscMPIInt size,rank,tagi,tagj,*len_si,*len_s,*len_ri;
1767 PetscInt **buf_rj,**buf_ri,**buf_ri_k;
1768 PetscInt len,proc,*dnz,*onz,*owners;
1769 PetscInt nzi,*bi,*bj;
1770 PetscInt nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
1771 MPI_Request *swaits,*rwaits;
1772 MPI_Status *sstatus,rstatus;
1773 Mat_Merge_SeqsToMPI *merge;
1774 PetscInt *ai,*aj,*Jptr,anz,*prmap=p->garray,pon,nspacedouble=0,j;
1775 PetscReal afill =1.0,afill_tmp;
1776 PetscInt rstart = P->cmap->rstart,rmax,aN=A->cmap->N,Armax;
1777 PetscScalar *vals;
1778 Mat_SeqAIJ *a_loc,*pdt,*pot;
1779 PetscTable ta;
1780 MatType mtype;
1781
1782 PetscFunctionBegindo { do { ; if (petscstack && (petscstack->currentsize
< 64)) { petscstack->function[petscstack->currentsize
] = __func__; petscstack->file[petscstack->currentsize]
= "/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
; petscstack->line[petscstack->currentsize] = 1782; petscstack
->petscroutine[petscstack->currentsize] = PETSC_TRUE; petscstack
->currentsize++; } if (petscstack) { petscstack->hotdepth
+= (PETSC_FALSE || petscstack->hotdepth); } ; } while (0)
; ; } while (0)
;
1783 ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1783,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1784 /* check if matrix local sizes are compatible */
1785 if (A->rmap->rstart != P->rmap->rstart || A->rmap->rend != P->rmap->rend) SETERRQ4(comm,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, A (%D, %D) != P (%D,%D)",A->rmap->rstart,A->rmap->rend,P->rmap->rstart,P->rmap->rend)return PetscError(comm,1785,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,60,PETSC_ERROR_INITIAL,"Matrix local dimensions are incompatible, A (%D, %D) != P (%D,%D)"
,A->rmap->rstart,A->rmap->rend,P->rmap->rstart
,P->rmap->rend)
;
1786
1787 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1787,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1788 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1788,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1789
1790  /* create struct Mat_APMPI and attach it to C later */
1791 ierr = PetscNew(&ptap)PetscMallocA(1,PETSC_TRUE,1791,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(1)*sizeof(**((&ptap))),((&ptap)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1791,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1792
1793 /* get A_loc by taking all local rows of A */
1794 ierr = MatMPIAIJGetLocalMat(A,MAT_INITIAL_MATRIX,&A_loc);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1794,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1795
1796 ptap->A_loc = A_loc;
1797 a_loc = (Mat_SeqAIJ*)(A_loc)->data;
1798 ai = a_loc->i;
1799 aj = a_loc->j;
1800
1801 /* determine symbolic Co=(p->B)^T*A - send to others */
1802 /*----------------------------------------------------*/
1803 ierr = MatTransposeSymbolic_SeqAIJ(p->A,&PDt);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1803,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1804 pdt = (Mat_SeqAIJ*)PDt->data;
1805 pdti = pdt->i; pdtj = pdt->j;
1806
1807 ierr = MatTransposeSymbolic_SeqAIJ(p->B,&POt);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1807,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1808 pot = (Mat_SeqAIJ*)POt->data;
1809 poti = pot->i; potj = pot->j;
1810
1811 /* then, compute symbolic Co = (p->B)^T*A */
1812  pon = (p->B)->cmap->n; /* total number of rows to be sent to other processors
1813                            >= (number of nonzero rows of C_seq) - pn */
1814 ierr = PetscMalloc1(pon+1,&coi)PetscMallocA(1,PETSC_FALSE,1814,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(pon+1)*sizeof(**(&coi)),(&coi))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1814,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1815 coi[0] = 0;
1816
1817 /* set initial free space to be fill*(nnz(p->B) + nnz(A)) */
1818 nnz = PetscRealIntMultTruncate(fill,PetscIntSumTruncate(poti[pon],ai[am]));
1819 ierr = PetscFreeSpaceGet(nnz,&free_space);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1819,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1820 current_space = free_space;
1821
1822 /* create and initialize a linked list */
1823 ierr = PetscTableCreate(A->cmap->n + a->B->cmap->N,aN,&ta);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1823,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1824 MatRowMergeMax_SeqAIJ(a_loc,am,ta){ PetscInt _j,_row,_nz,*_col; if (a_loc) { for (_row=0; _row<
am; _row++) { _nz = a_loc->i[_row+1] - a_loc->i[_row]; for
(_j=0; _j<_nz; _j++) { _col = _j + a_loc->j + a_loc->
i[_row]; PetscTableAdd(ta,*_col+1,1,INSERT_VALUES); } } } }
;
1825 ierr = PetscTableGetCount(ta,&Armax);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1825,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1826
1827 ierr = PetscLLCondensedCreate_Scalable(Armax,&lnk);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1827,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1828
1829 for (i=0; i<pon; i++) {
1830 pnz = poti[i+1] - poti[i];
1831 ptJ = potj + poti[i];
1832 for (j=0; j<pnz; j++) {
1833 row = ptJ[j]; /* row of A_loc == col of Pot */
1834 anz = ai[row+1] - ai[row];
1835 Jptr = aj + ai[row];
1836      /* add non-zero cols of A_loc into the sorted linked list lnk */
1837 ierr = PetscLLCondensedAddSorted_Scalable(anz,Jptr,lnk);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1837,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1838 }
1839 nnz = lnk[0];
1840
1841 /* If free space is not available, double the total space in the list */
1842 if (current_space->local_remaining<nnz) {
1843 ierr = PetscFreeSpaceGet(PetscIntSumTruncate(nnz,current_space->total_array_size),&current_space);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1843,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1844 nspacedouble++;
1845 }
1846
1847    /* Copy data into free space, and zero out dense rows */
1848 ierr = PetscLLCondensedClean_Scalable(nnz,current_space->array,lnk);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1848,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1849
1850 current_space->array += nnz;
1851 current_space->local_used += nnz;
1852 current_space->local_remaining -= nnz;
1853
1854 coi[i+1] = coi[i] + nnz;
1855 }
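/* [Editor's sketch -- not part of the PETSc source] The symbolic loop above
   computes, for each row i of Po^T, the size of the union of the A_loc rows
   selected by that row's columns. PetscLLCondensedAddSorted_Scalable keeps
   the union as a sorted condensed linked list; a simpler (but O(N)-storage)
   restatement with a marker array, illustrative only: */
static int SymbolicRowUnion(int pnz, const int *ptJ,      /* columns of the Pt row             */
                            const int *ai, const int *aj, /* CSR structure of A_loc            */
                            char *marker, int *outcols)   /* marker: zeroed, size = col count  */
{
  int nnz = 0;
  for (int j = 0; j < pnz; j++) {
    int row = ptJ[j];                 /* row of A_loc == col of Pot */
    for (int k = ai[row]; k < ai[row+1]; k++) {
      if (!marker[aj[k]]) { marker[aj[k]] = 1; outcols[nnz++] = aj[k]; }
    }
  }
  for (int j = 0; j < nnz; j++) marker[outcols[j]] = 0; /* reset for the next row */
  return nnz; /* row length; note outcols is not sorted here, unlike lnk */
}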
1856
1857 ierr = PetscMalloc1(coi[pon]+1,&coj)PetscMallocA(1,PETSC_FALSE,1857,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(coi[pon]+1)*sizeof(**(&coj)),(&coj))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1857,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1858 ierr = PetscFreeSpaceContiguous(&free_space,coj);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1858,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1859 ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1859,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
; /* must destroy to get a new one for C */
1860
1861 afill_tmp = (PetscReal)coi[pon]/(poti[pon] + ai[am]+1);
1862 if (afill_tmp > afill) afill = afill_tmp;
1863
1864 /* send j-array (coj) of Co to other processors */
1865 /*----------------------------------------------*/
1866 /* determine row ownership */
1867 ierr = PetscNew(&merge)PetscMallocA(1,PETSC_TRUE,1867,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(1)*sizeof(**((&merge))),((&merge)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1867,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1868 ierr = PetscLayoutCreate(comm,&merge->rowmap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1868,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1869
1870 merge->rowmap->n = pn;
1871 merge->rowmap->bs = 1;
1872
1873 ierr = PetscLayoutSetUp(merge->rowmap);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1873,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1874 owners = merge->rowmap->range;
1875
1876 /* determine the number of messages to send, their lengths */
1877 ierr = PetscCalloc1(size,&len_si)PetscMallocA(1,PETSC_TRUE,1877,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(size)*sizeof(**(&len_si)),(&len_si))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1877,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1878 ierr = PetscMalloc1(size,&merge->len_s)PetscMallocA(1,PETSC_FALSE,1878,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(size)*sizeof(**(&merge->len_s)),(&merge->
len_s))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1878,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1879
1880 len_s = merge->len_s;
1881 merge->nsend = 0;
1882
1883 ierr = PetscMalloc1(size+2,&owners_co)PetscMallocA(1,PETSC_FALSE,1883,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(size+2)*sizeof(**(&owners_co)),(&owners_co)
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1883,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1884 ierr = PetscMemzero(len_s,size*sizeof(PetscMPIInt));CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1884,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1885
1886 proc = 0;
1887 for (i=0; i<pon; i++) {
1888 while (prmap[i] >= owners[proc+1]) proc++;
1889 len_si[proc]++; /* num of rows in Co to be sent to [proc] */
1890 len_s[proc] += coi[i+1] - coi[i];
1891 }
1892
1893  len          = 0; /* total length of buf_s[] */
1894 owners_co[0] = 0;
1895 for (proc=0; proc<size; proc++) {
1896 owners_co[proc+1] = owners_co[proc] + len_si[proc];
1897 if (len_si[proc]) {
1898 merge->nsend++;
1899 len_si[proc] = 2*(len_si[proc] + 1);
1900 len += len_si[proc];
1901 }
1902 }
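/* [Editor's note] After this loop len_si[proc] = 2*(nrows+1) for each
   destination: one slot for the row count, nrows row indices, and nrows+1
   offsets -- exactly the i-structure layout packed further below -- while
   len_s[proc] counts the column indices (and, later, values) to ship. */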
1903
1904 /* determine the number and length of messages to receive for coi and coj */
1905 ierr = PetscGatherNumberOfMessages(comm,NULL((void*)0),len_s,&merge->nrecv);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1905,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1906 ierr = PetscGatherMessageLengths2(comm,merge->nsend,merge->nrecv,len_s,len_si,&merge->id_r,&merge->len_r,&len_ri);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1906,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1907
1908 /* post the Irecv and Isend of coj */
1909 ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1909,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1910 ierr = PetscPostIrecvInt(comm,tagj,merge->nrecv,merge->id_r,merge->len_r,&buf_rj,&rwaits);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1910,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1911 ierr = PetscMalloc1(merge->nsend+1,&swaits)PetscMallocA(1,PETSC_FALSE,1911,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(merge->nsend+1)*sizeof(**(&swaits)),(&swaits
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1911,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1912 for (proc=0, k=0; proc<size; proc++) {
1913 if (!len_s[proc]) continue;
1914 i = owners_co[proc];
1915 ierr = MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k)((petsc_isend_ct++,0) || PetscMPITypeSize(&(petsc_isend_len
),(len_s[proc]),(((MPI_Datatype)0x4c000405))) || MPI_Isend((coj
+coi[i]),(len_s[proc]),(((MPI_Datatype)0x4c000405)),(proc),(tagj
),(comm),(swaits+k)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1915,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1916 k++;
1917 }
1918
1919 /* receives and sends of coj are complete */
1920 ierr = PetscMalloc1(size,&sstatus)PetscMallocA(1,PETSC_FALSE,1920,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(size)*sizeof(**(&sstatus)),(&sstatus))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1920,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1921 for (i=0; i<merge->nrecv; i++) {
1922 PetscMPIInt icompleted;
1923 ierr = MPI_Waitany(merge->nrecv,rwaits,&icompleted,&rstatus)((petsc_wait_any_ct++,petsc_sum_of_waits_ct++,0) || MPI_Waitany
((merge->nrecv),(rwaits),(&icompleted),(&rstatus))
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1923,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1924 }
1925 ierr = PetscFree(rwaits)((*PetscTrFree)((void*)(rwaits),1925,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
) || ((rwaits) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1925,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1926 if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,swaits,sstatus)((petsc_wait_all_ct++,petsc_sum_of_waits_ct += (PetscLogDouble
) (merge->nsend),0) || MPI_Waitall((merge->nsend),(swaits
),(sstatus)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1926,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1927
1928 /* add received column indices into table to update Armax */
1929 /* Armax can be as large as aN if a P[row,:] is dense, see src/ksp/ksp/examples/tutorials/ex56.c! */
1930 for (k=0; k<merge->nrecv; k++) {/* k-th received message */
1931 Jptr = buf_rj[k];
1932 for (j=0; j<merge->len_r[k]; j++) {
1933 ierr = PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1933,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1934 }
1935 }
1936 ierr = PetscTableGetCount(ta,&Armax);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1936,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1937 /* printf("Armax %d, an %d + Bn %d = %d, aN %d\n",Armax,A->cmap->n,a->B->cmap->N,A->cmap->n+a->B->cmap->N,aN); */
1938
1939 /* send and recv coi */
1940 /*-------------------*/
1941 ierr = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1941,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1942 ierr = PetscPostIrecvInt(comm,tagi,merge->nrecv,merge->id_r,len_ri,&buf_ri,&rwaits);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1942,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1943 ierr = PetscMalloc1(len+1,&buf_s)PetscMallocA(1,PETSC_FALSE,1943,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(len+1)*sizeof(**(&buf_s)),(&buf_s))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1943,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1944 buf_si = buf_s; /* points to the beginning of k-th msg to be sent */
1945 for (proc=0,k=0; proc<size; proc++) {
1946 if (!len_s[proc]) continue;
1947 /* form outgoing message for i-structure:
1948 buf_si[0]: nrows to be sent
1949 [1:nrows]: row index (global)
1950 [nrows+1:2*nrows+1]: i-structure index
1951 */
1952 /*-------------------------------------------*/
1953 nrows = len_si[proc]/2 - 1;
1954 buf_si_i = buf_si + nrows+1;
1955 buf_si[0] = nrows;
1956 buf_si_i[0] = 0;
1957 nrows = 0;
1958 for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
1959 nzi = coi[i+1] - coi[i];
1960 buf_si_i[nrows+1] = buf_si_i[nrows] + nzi; /* i-structure */
1961 buf_si[nrows+1] = prmap[i] -owners[proc]; /* local row index */
1962 nrows++;
1963 }
1964 ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k)((petsc_isend_ct++,0) || PetscMPITypeSize(&(petsc_isend_len
),(len_si[proc]),(((MPI_Datatype)0x4c000405))) || MPI_Isend((
buf_si),(len_si[proc]),(((MPI_Datatype)0x4c000405)),(proc),(tagi
),(comm),(swaits+k)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1964,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1965 k++;
1966 buf_si += len_si[proc];
1967 }
1968 i = merge->nrecv;
1969 while (i--) {
1970 PetscMPIInt icompleted;
1971 ierr = MPI_Waitany(merge->nrecv,rwaits,&icompleted,&rstatus)((petsc_wait_any_ct++,petsc_sum_of_waits_ct++,0) || MPI_Waitany
((merge->nrecv),(rwaits),(&icompleted),(&rstatus))
)
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1971,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1972 }
1973 ierr = PetscFree(rwaits)((*PetscTrFree)((void*)(rwaits),1973,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
) || ((rwaits) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1973,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1974 if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,swaits,sstatus)((petsc_wait_all_ct++,petsc_sum_of_waits_ct += (PetscLogDouble
) (merge->nsend),0) || MPI_Waitall((merge->nsend),(swaits
),(sstatus)))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1974,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;}
1975 ierr = PetscFree(len_si)((*PetscTrFree)((void*)(len_si),1975,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
) || ((len_si) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1975,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1976 ierr = PetscFree(len_ri)((*PetscTrFree)((void*)(len_ri),1976,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
) || ((len_ri) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1976,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1977 ierr = PetscFree(swaits)((*PetscTrFree)((void*)(swaits),1977,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
) || ((swaits) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1977,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1978 ierr = PetscFree(sstatus)((*PetscTrFree)((void*)(sstatus),1978,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
) || ((sstatus) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1978,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1979 ierr = PetscFree(buf_s)((*PetscTrFree)((void*)(buf_s),1979,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
) || ((buf_s) = 0,0))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1979,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1980
1981 /* compute the local portion of C (mpi mat) */
1982 /*------------------------------------------*/
1983 /* allocate bi array and free space for accumulating nonzero column info */
1984 ierr = PetscMalloc1(pn+1,&bi)PetscMallocA(1,PETSC_FALSE,1984,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(pn+1)*sizeof(**(&bi)),(&bi))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1984,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1985 bi[0] = 0;
1986
1987 /* set initial free space to be fill*(nnz(P) + nnz(AP)) */
1988 nnz = PetscRealIntMultTruncate(fill,PetscIntSumTruncate(pdti[pn],PetscIntSumTruncate(poti[pon],ai[am])));
1989 ierr = PetscFreeSpaceGet(nnz,&free_space);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1989,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1990 current_space = free_space;
1991
1992 ierr = PetscMalloc3(merge->nrecv,&buf_ri_k,merge->nrecv,&nextrow,merge->nrecv,&nextci)PetscMallocA(3,PETSC_FALSE,1992,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(merge->nrecv)*sizeof(**(&buf_ri_k)),(&buf_ri_k
),(size_t)(merge->nrecv)*sizeof(**(&nextrow)),(&nextrow
),(size_t)(merge->nrecv)*sizeof(**(&nextci)),(&nextci
))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),1992,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
1993 for (k=0; k<merge->nrecv; k++) {
1994    buf_ri_k[k] = buf_ri[k]; /* beginning of k-th received i-structure */
1995    nrows       = *buf_ri_k[k];
1996    nextrow[k]  = buf_ri_k[k] + 1;           /* next row number of k-th received i-structure */
1997    nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of k-th received i-structure */
1998 }
1999
2000 ierr = PetscLLCondensedCreate_Scalable(Armax,&lnk);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2000,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2001 ierr = MatPreallocateInitialize(comm,pn,A->cmap->n,dnz,onz)0; { PetscErrorCode _4_ierr; PetscInt __nrows = (pn),__ncols =
(A->cmap->n),__rstart,__start,__end; _4_ierr = PetscMallocA
(2,PETSC_TRUE,2001,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)((size_t)__nrows)*sizeof(**(&dnz)),(&dnz),(size_t
)((size_t)__nrows)*sizeof(**(&onz)),(&onz));do {if (__builtin_expect
(!!(_4_ierr),0)) return PetscError(((MPI_Comm)0x44000001),2001
,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,_4_ierr,PETSC_ERROR_REPEAT," ");} while (0); __start = 0; __end
= __start; _4_ierr = MPI_Scan(&__ncols,&__end,1,((MPI_Datatype
)0x4c000405),(MPI_Op)(0x58000003),comm);do {if (__builtin_expect
(!!(_4_ierr),0)) return PetscError(((MPI_Comm)0x44000001),2001
,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,_4_ierr,PETSC_ERROR_REPEAT," ");} while (0); __start = __end
- __ncols; _4_ierr = MPI_Scan(&__nrows,&__rstart,1,(
(MPI_Datatype)0x4c000405),(MPI_Op)(0x58000003),comm);do {if (
__builtin_expect(!!(_4_ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2001,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,_4_ierr,PETSC_ERROR_REPEAT," ");} while (0); __rstart = __rstart
- __nrows;
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2001,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2002 rmax = 0;
2003 for (i=0; i<pn; i++) {
2004    /* add pdt[i,:]*A into lnk */
2005 pnz = pdti[i+1] - pdti[i];
2006 ptJ = pdtj + pdti[i];
2007 for (j=0; j<pnz; j++) {
2008      row = ptJ[j]; /* row of A_loc == col of Pdt */
2009 anz = ai[row+1] - ai[row];
2010 Jptr = aj + ai[row];
2011      /* add non-zero cols of A_loc into the sorted linked list lnk */
2012 ierr = PetscLLCondensedAddSorted_Scalable(anz,Jptr,lnk);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2012,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2013 }
2014
2015 /* add received col data into lnk */
2016 for (k=0; k<merge->nrecv; k++) { /* k-th received message */
2017 if (i == *nextrow[k]) { /* i-th row */
2018 nzi = *(nextci[k]+1) - *nextci[k];
2019 Jptr = buf_rj[k] + *nextci[k];
2020 ierr = PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2020,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2021 nextrow[k]++; nextci[k]++;
2022 }
2023 }
2024 nnz = lnk[0];
2025
2026 /* if free space is not available, make more free space */
2027 if (current_space->local_remaining<nnz) {
2028 ierr = PetscFreeSpaceGet(PetscIntSumTruncate(nnz,current_space->total_array_size),&current_space);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2028,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2029 nspacedouble++;
2030 }
2031 /* copy data into free space, then initialize lnk */
2032 ierr = PetscLLCondensedClean_Scalable(nnz,current_space->array,lnk);CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2032,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2033 ierr = MatPreallocateSet(i+owners[rank],nnz,current_space->array,dnz,onz)0;{ PetscInt __i; if (i+owners[rank] < __rstart) return PetscError
(((MPI_Comm)0x44000001),2033,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,63,PETSC_ERROR_INITIAL,"Trying to set preallocation for row %D less than first local row %D"
,i+owners[rank],__rstart); if (i+owners[rank] >= __rstart+
__nrows) return PetscError(((MPI_Comm)0x44000001),2033,__func__
,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,63,PETSC_ERROR_INITIAL,"Trying to set preallocation for row %D greater than last local row %D"
,i+owners[rank],__rstart+__nrows-1); for (__i=0; __i<nnz; __i
++) { if ((current_space->array)[__i] < __start || (current_space
->array)[__i] >= __end) onz[i+owners[rank] - __rstart]++
; else if (dnz[i+owners[rank] - __rstart] < __ncols) dnz[i
+owners[rank] - __rstart]++; }}
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2033,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2034
2035 current_space->array += nnz;
2036 current_space->local_used += nnz;
2037 current_space->local_remaining -= nnz;
2038
2039 bi[i+1] = bi[i] + nnz;
2040 if (nnz > rmax) rmax = nnz;
2041 }
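/* [Editor's sketch -- not part of the PETSc source] MatPreallocateSet
   (expanded above) classifies each column of the freshly built row as
   diagonal-block or off-diagonal for the MPIAIJ preallocation counts.
   Distilled, with [cstart,cend) standing for this rank's column ownership
   range: */
static void CountDiagOffdiag(int nnz, const int *cols, int cstart, int cend,
                             int *dnz_row, int *onz_row)
{
  *dnz_row = *onz_row = 0;
  for (int k = 0; k < nnz; k++) {
    if (cols[k] >= cstart && cols[k] < cend) (*dnz_row)++; /* lands in the diagonal block     */
    else                                     (*onz_row)++; /* lands in the off-diagonal block */
  }
}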
2042 ierr = PetscFree3(buf_ri_k,nextrow,nextci)PetscFreeA(3,2042,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,&(buf_ri_k),&(nextrow),&(nextci))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2042,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2043
2044 ierr = PetscMalloc1(bi[pn]+1,&bj)PetscMallocA(1,PETSC_FALSE,2044,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,(size_t)(bi[pn]+1)*sizeof(**(&bj)),(&bj))
;CHKERRQ(ierr)do {if (__builtin_expect(!!(ierr),0)) return PetscError(((MPI_Comm
)0x44000001),2044,__func__,"/sandbox/petsc/petsc.next-tmp/src/mat/impls/aij/mpi/mpimatmatmult.c"
,ierr,PETSC_ERROR_REPEAT," ");} while (0)
;
2045 ierr = PetscFreeSpaceContiguous(&free_space,bj);CHKERRQ(ierr);
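/* flatten the chained free-space segments into the single array bj, so that
   bi/bj together form the complete CSR sparsity pattern of the product */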
2046 afill_tmp = (PetscReal)bi[pn]/(pdti[pn] + poti[pon] + ai[am]+1);
2047 if (afill_tmp > afill) afill = afill_tmp;
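/* achieved fill ratio: nonzeros of the product over those of the inputs
   (pdti/poti appear to be the row offsets of the diagonal and off-diagonal
   parts of the transposed P, ai those of A); the maximum is later reported
   against the caller-supplied estimate 'fill' */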
2048 ierr = PetscLLCondensedDestroy_Scalable(lnk);CHKERRQ(ierr);
2049 ierr = PetscTableDestroy(&ta);CHKERRQ(ierr);
2050
2051 ierr = MatDestroy(&POt);CHKERRQ(ierr);
2052 ierr = MatDestroy(&PDt);CHKERRQ(ierr);
2053
2054 /* create symbolic parallel matrix Cmpi - why can this not be assembled in the Numeric part? */
2055 /*----------------------------------------------------------------------------------*/
2056 ierr = PetscCalloc1(rmax+1,&vals);CHKERRQ(ierr);
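/* vals is a zeroed scratch array of length rmax+1 (the widest row); inserting
   explicit zeros below sets only the sparsity pattern of Cmpi */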
2057
2058 ierr = MatCreate(comm,&Cmpi);CHKERRQ(ierr);
2059 ierr = MatSetSizes(Cmpi,pn,A->cmap->n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
2060 ierr = MatSetBlockSizes(Cmpi,PetscAbs(P->cmap->bs),PetscAbs(A->cmap->bs));CHKERRQ(ierr);
2061 ierr = MatGetType(A,&mtype);CHKERRQ(ierr);
2062 ierr = MatSetType(Cmpi,mtype);CHKERRQ(ierr);
2063 ierr = MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);CHKERRQ(ierr);
2064 ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
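/* dnz/onz counted, per local row, the nonzeros falling in the diagonal versus
   off-diagonal block; once preallocation is set, MatPreallocateFinalize frees them */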
2065 ierr = MatSetBlockSize(Cmpi,1);CHKERRQ(ierr);
2066 for (i=0; i<pn; i++) {
2067 row = i + rstart;
2068 nnz = bi[i+1] - bi[i];
2069 Jptr = bj + bi[i];
2070 ierr = MatSetValues(Cmpi,1,&row,nnz,Jptr,vals,INSERT_VALUES);CHKERRQ(ierr);
2071 }
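/* each local row's pattern is inserted with zero values; the Numeric phase
   reuses this preallocated pattern and fills in the actual entries */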
2072 ierr = MatAssemblyBegin(Cmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2073 ierr = MatAssemblyEnd(Cmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
2074 ierr = PetscFree(vals);CHKERRQ(ierr);
2075
2076 merge->bi = bi;
2077 merge->bj = bj;
2078 merge->coi = coi;
2079 merge->coj = coj;
2080 merge->buf_ri = buf_ri;
2081 merge->buf_rj = buf_rj;
2082 merge->owners_co = owners_co;
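/* the merge struct caches the receive buffers and row-ownership data so the
   numeric phase can repeat the merge without redoing the communication setup */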
2083
2084 /* attach the supporting struct to Cmpi for reuse */
2085 c = (Mat_MPIAIJ*)Cmpi->data;
2086
2087 c->ap = ptap;
2088 ptap->api = NULL;
2089 ptap->apj = NULL;
2090 ptap->merge = merge;
2091 ptap->apa = NULL;
2092 ptap->destroy = Cmpi->ops->destroy;
2093 ptap->duplicate = Cmpi->ops->duplicate;
2094
2095 Cmpi->ops->mattransposemultnumeric = MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ;
2096 Cmpi->ops->destroy = MatDestroy_MPIAIJ_PtAP;
2097 Cmpi->ops->freeintermediatedatastructures = MatFreeIntermediateDataStructures_MPIAIJ_AP;
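/* destroy is overridden so the attached ptap/merge data is freed together with
   Cmpi; freeintermediatedatastructures lets a caller release it earlier */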
2098
2099 *C = Cmpi;
2100#if defined(PETSC_USE_INFO)
2101 if (bi[pn] != 0) {
2102 ierr = PetscInfo3(Cmpi,"Reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)afill);CHKERRQ(ierr);
2103 ierr = PetscInfo1(Cmpi,"Use MatTransposeMatMult(A,B,MatReuse,%g,&C) for best performance.\n",(double)afill);CHKERRQ(ierr);
2104 } else {
2105 ierr = PetscInfo(Cmpi,"Empty matrix product\n");CHKERRQ(ierr);
2106 }
2107#endif
2108 PetscFunctionReturn(0);
2109}