Bug Summary

File: mat/impls/aij/mpi/mmaij.c
Warning: line 24, column 14
Division by zero
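The flagged expression is the modulo in PetscHashStep (line 24 of the petscctable.h listing below), 1 + (x % (ta->tablesize - 1)), which divides by zero whenever ta->tablesize == 1. A minimal standalone sketch of that condition; the struct here is a hypothetical stand-in for _n_PetscTable, reduced to the one field the hash helpers read:

#include <stdio.h>

/* Hypothetical stand-in for _n_PetscTable; for illustration only. */
typedef struct { long tablesize; } MiniTable;

/* Mirrors the flagged expression: 1 + (x % (tablesize - 1)).
   When tablesize == 1 the divisor is 0, i.e. the reported division by zero. */
static unsigned long mini_hash_step(const MiniTable *ta, unsigned long x)
{
  return 1 + (x % (unsigned long)(ta->tablesize - 1));
}

int main(void)
{
  MiniTable ok = { 7 };
  printf("step = %lu\n", mini_hash_step(&ok, 42)); /* 42 % 6 == 0, prints step = 1 */
  /* MiniTable bad = { 1 };                                                        */
  /* mini_hash_step(&bad, 42);  -- x % 0: undefined behavior, typically SIGFPE     */
  return 0;
}

The annotated path below reaches that expression from MatSetUpMultiply_MPIAIJ via PetscTableAdd, under the analyzer's assumption that such a table can exist.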

Annotated Source Code


/sandbox/petsc/petsc.master/src/mat/impls/aij/mpi/mmaij.c

1
2/*
3 Support for the parallel AIJ matrix vector multiply
4*/
5#include <../src/mat/impls/aij/mpi/mpiaij.h>
6#include <petsc/private/vecimpl.h>
7#include <petsc/private/isimpl.h> /* needed because accesses data structure of ISLocalToGlobalMapping directly */
8
9PetscErrorCode MatSetUpMultiply_MPIAIJ(Mat mat)
10{
11 Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data;
12 Mat_SeqAIJ *B = (Mat_SeqAIJ*)(aij->B->data);
13 PetscErrorCode ierr;
14 PetscInt i,j,*aj = B->j,ec = 0,*garray;
15 IS from,to;
16 Vec gvec;
17#if defined(PETSC_USE_CTABLE)
18 PetscTable gid1_lid1;
19 PetscTablePosition tpos;
20 PetscInt gid,lid;
21#else
22 PetscInt N = mat->cmap->N,*indices;
23#endif
24
25 PetscFunctionBegin;
26 if (!aij->garray) {
[1] Assuming the condition is true
[2] Taking true branch
27#if defined(PETSC_USE_CTABLE)
28 /* use a table */
29 ierr = PetscTableCreate(aij->B->rmap->n,mat->cmap->N+1,&gid1_lid1);CHKERRQ(ierr);
30 for (i=0; i<aij->B->rmap->n; i++) {
[3] Assuming the condition is true
[4] Loop condition is true. Entering loop body
[7] Assuming the condition is true
[8] Loop condition is true. Entering loop body
[11] Assuming the condition is true
[12] Loop condition is true. Entering loop body
[15] Assuming the condition is true
[16] Loop condition is true. Entering loop body
31 for (j=0; j<B->ilen[i]; j++) {
[5] Assuming the condition is false
[6] Loop condition is false. Execution continues on line 30
[9] Assuming the condition is false
[10] Loop condition is false. Execution continues on line 30
[13] Assuming the condition is false
[14] Loop condition is false. Execution continues on line 30
[17] Assuming the condition is true
[18] Loop condition is true. Entering loop body
32 PetscInt data,gid1 = aj[B->i[i] + j] + 1;
33 ierr = PetscTableFind(gid1_lid1,gid1,&data);CHKERRQ(ierr);
34 if (!data) {
[19] Taking true branch
35 /* one based table */
36 ierr = PetscTableAdd(gid1_lid1,gid1,++ec,INSERT_VALUES);CHKERRQ(ierr);
[20] Calling 'PetscTableAdd'
37 }
38 }
39 }
40 /* form array of columns we need */
41 ierr = PetscMalloc1(ec+1,&garray);CHKERRQ(ierr);
42 ierr = PetscTableGetHeadPosition(gid1_lid1,&tpos);CHKERRQ(ierr);
43 while (tpos) {
44 ierr = PetscTableGetNext(gid1_lid1,&tpos,&gid,&lid);CHKERRQ(ierr);
45 gid--;
46 lid--;
47 garray[lid] = gid;
48 }
49 ierr = PetscSortInt(ec,garray);CHKERRQ(ierr); /* sort, and rebuild */
50 ierr = PetscTableRemoveAll(gid1_lid1);CHKERRQ(ierr);
51 for (i=0; i<ec; i++) {
52 ierr = PetscTableAdd(gid1_lid1,garray[i]+1,i+1,INSERT_VALUES);CHKERRQ(ierr);
53 }
54 /* compact out the extra columns in B */
55 for (i=0; i<aij->B->rmap->n; i++) {
56 for (j=0; j<B->ilen[i]; j++) {
57 PetscInt gid1 = aj[B->i[i] + j] + 1;
58 ierr = PetscTableFind(gid1_lid1,gid1,&lid);CHKERRQ(ierr);
59 lid--;
60 aj[B->i[i] + j] = lid;
61 }
62 }
63 aij->B->cmap->n = aij->B->cmap->N = ec;
64 aij->B->cmap->bs = 1;
65
66 ierr = PetscLayoutSetUp((aij->B->cmap));CHKERRQ(ierr);
67 ierr = PetscTableDestroy(&gid1_lid1);CHKERRQ(ierr);
68#else
69 /* Make an array as long as the number of columns */
70 /* mark those columns that are in aij->B */
71 ierr = PetscCalloc1(N+1,&indices);CHKERRQ(ierr);
72 for (i=0; i<aij->B->rmap->n; i++) {
73 for (j=0; j<B->ilen[i]; j++) {
74 if (!indices[aj[B->i[i] + j]]) ec++;
75 indices[aj[B->i[i] + j]] = 1;
76 }
77 }
78
79 /* form array of columns we need */
80 ierr = PetscMalloc1(ec+1,&garray);CHKERRQ(ierr);
81 ec = 0;
82 for (i=0; i<N; i++) {
83 if (indices[i]) garray[ec++] = i;
84 }
85
86 /* make indices now point into garray */
87 for (i=0; i<ec; i++) {
88 indices[garray[i]] = i;
89 }
90
91 /* compact out the extra columns in B */
92 for (i=0; i<aij->B->rmap->n; i++) {
93 for (j=0; j<B->ilen[i]; j++) {
94 aj[B->i[i] + j] = indices[aj[B->i[i] + j]];
95 }
96 }
97 aij->B->cmap->n = aij->B->cmap->N = ec;
98 aij->B->cmap->bs = 1;
99
100 ierr = PetscLayoutSetUp((aij->B->cmap));CHKERRQ(ierr);
101 ierr = PetscFree(indices);CHKERRQ(ierr);
102#endif
103 } else {
104 garray = aij->garray;
105 }
106
107 if (!aij->lvec) {
108 /* create local vector that is used to scatter into */
109 ierr = VecCreateSeq(PETSC_COMM_SELF,ec,&aij->lvec);CHKERRQ(ierr);
110 } else {
111 ierr = VecGetSize(aij->lvec,&ec);CHKERRQ(ierr);
112 }
113
114 /* create two temporary Index sets for build scatter gather */
115 ierr = ISCreateGeneral(PETSC_COMM_SELF,ec,garray,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
116 ierr = ISCreateStride(PETSC_COMM_SELF,ec,0,1,&to);CHKERRQ(ierr);
117
118 /* create temporary global vector to generate scatter context */
119 /* This does not allocate the array's memory so is efficient */
120 ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)mat),1,mat->cmap->n,mat->cmap->N,NULL,&gvec);CHKERRQ(ierr);
121
122 /* generate the scatter context */
123 if (aij->Mvctx_mpi1_flg) {
124 ierr = VecScatterDestroy(&aij->Mvctx_mpi1);CHKERRQ(ierr);
125 ierr = VecScatterCreate(gvec,from,aij->lvec,to,&aij->Mvctx_mpi1);CHKERRQ(ierr);
126 ierr = VecScatterSetType(aij->Mvctx_mpi1,VECSCATTERMPI1);CHKERRQ(ierr);
127 ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)aij->Mvctx_mpi1);CHKERRQ(ierr);
128 } else {
129 ierr = VecScatterDestroy(&aij->Mvctx);CHKERRQ(ierr);
130 ierr = VecScatterCreate(gvec,from,aij->lvec,to,&aij->Mvctx);CHKERRQ(ierr);
131 ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)aij->Mvctx);CHKERRQ(ierr);
132 ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)aij->lvec);CHKERRQ(ierr);
133 ierr = PetscLogObjectMemory((PetscObject)mat,(ec+1)*sizeof(PetscInt));CHKERRQ(ierr);
134 }
135 aij->garray = garray;
136
137 ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)from);CHKERRQ(ierr);
138 ierr = PetscLogObjectParent((PetscObject)mat,(PetscObject)to);CHKERRQ(ierr);
139
140 ierr = ISDestroy(&from);CHKERRQ(ierr);
141 ierr = ISDestroy(&to);CHKERRQ(ierr);
142 ierr = VecDestroy(&gvec);CHKERRQ(ierr);
143 PetscFunctionReturn(0);
144}
145
146/*
147 Takes the local part of an already assembled MPIAIJ matrix
148 and disassembles it. This is to allow new nonzeros into the matrix
149 that require more communication in the matrix vector multiply.
150 Thus certain data-structures must be rebuilt.
151
152 Kind of slow! But that's what application programmers get when
153 they are sloppy.
154*/
155PetscErrorCode MatDisAssemble_MPIAIJ(Mat A)
156{
157 Mat_MPIAIJ *aij = (Mat_MPIAIJ*)A->data;
158 Mat B = aij->B,Bnew;
159 Mat_SeqAIJ *Baij = (Mat_SeqAIJ*)B->data;
160 PetscErrorCode ierr;
161 PetscInt i,j,m = B->rmap->n,n = A->cmap->N,col,ct = 0,*garray = aij->garray,*nz,ec;
162 PetscScalar v;
163
164 PetscFunctionBegin;
165 /* free stuff related to matrix-vec multiply */
166 ierr = VecGetSize(aij->lvec,&ec);CHKERRQ(ierr); /* needed for PetscLogObjectMemory below */
167 ierr = VecDestroy(&aij->lvec);CHKERRQ(ierr);
168 if (aij->colmap) {
169#if defined(PETSC_USE_CTABLE)
170 ierr = PetscTableDestroy(&aij->colmap);CHKERRQ(ierr);
171#else
172 ierr = PetscFree(aij->colmap);CHKERRQ(ierr);
173 ierr = PetscLogObjectMemory((PetscObject)A,-aij->B->cmap->n*sizeof(PetscInt));CHKERRQ(ierr);
174#endif
175 }
176
177 /* make sure that B is assembled so we can access its values */
178 ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
179 ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
180
181 /* invent new B and copy stuff over */
182 ierr = PetscMalloc1(m+1,&nz);CHKERRQ(ierr);
183 for (i=0; i<m; i++) {
184 nz[i] = Baij->i[i+1] - Baij->i[i];
185 }
186 ierr = MatCreate(PETSC_COMM_SELF,&Bnew);CHKERRQ(ierr);
187 ierr = MatSetSizes(Bnew,m,n,m,n);CHKERRQ(ierr);
188 ierr = MatSetBlockSizesFromMats(Bnew,A,A);CHKERRQ(ierr);
189 ierr = MatSetType(Bnew,((PetscObject)B)->type_name);CHKERRQ(ierr);
190 ierr = MatSeqAIJSetPreallocation(Bnew,0,nz);CHKERRQ(ierr);
191
192 if (Baij->nonew >= 0) { /* Inherit insertion error options (if positive). */
193 ((Mat_SeqAIJ*)Bnew->data)->nonew = Baij->nonew;
194 }
195
196 /*
197 Ensure that B's nonzerostate is monotonically increasing.
198 Or should this follow the MatSetValues() loop to preserve B's nonzerostate across a MatDisAssemble() call?
199 */
200 Bnew->nonzerostate = B->nonzerostate;
201
202 ierr = PetscFree(nz);CHKERRQ(ierr);
203 for (i=0; i<m; i++) {
204 for (j=Baij->i[i]; j<Baij->i[i+1]; j++) {
205 col = garray[Baij->j[ct]];
206 v = Baij->a[ct++];
207 ierr = MatSetValues(Bnew,1,&i,1,&col,&v,B->insertmode);CHKERRQ(ierr);
208 }
209 }
210 ierr = PetscFree(aij->garray);CHKERRQ(ierr);
211 ierr = PetscLogObjectMemory((PetscObject)A,-ec*sizeof(PetscInt));CHKERRQ(ierr);
212 ierr = MatDestroy(&B);CHKERRQ(ierr);
213 ierr = PetscLogObjectParent((PetscObject)A,(PetscObject)Bnew);CHKERRQ(ierr);
214
215 aij->B = Bnew;
216 A->was_assembled = PETSC_FALSE;
217 PetscFunctionReturn(0);
218}
219
220/* ugly stuff added for Glenn someday we should fix this up */
221
222static PetscInt *auglyrmapd = 0,*auglyrmapo = 0; /* mapping from the local ordering to the "diagonal" and "off-diagonal" parts of the local matrix */
223static Vec auglydd = 0,auglyoo = 0; /* work vectors used to scale the two parts of the local matrix */
224
225
226PetscErrorCode MatMPIAIJDiagonalScaleLocalSetUp(Mat inA,Vec scale)
227{
228 Mat_MPIAIJ *ina = (Mat_MPIAIJ*) inA->data; /*access private part of matrix */
229 PetscErrorCode ierr;
230 PetscInt i,n,nt,cstart,cend,no,*garray = ina->garray,*lindices;
231 PetscInt *r_rmapd,*r_rmapo;
232
233 PetscFunctionBegin;
234 ierr = MatGetOwnershipRange(inA,&cstart,&cend);CHKERRQ(ierr);
235 ierr = MatGetSize(ina->A,NULL,&n);CHKERRQ(ierr);
236 ierr = PetscCalloc1(inA->rmap->mapping->n+1,&r_rmapd);CHKERRQ(ierr);
237 nt = 0;
238 for (i=0; i<inA->rmap->mapping->n; i++) {
239 if (inA->rmap->mapping->indices[i] >= cstart && inA->rmap->mapping->indices[i] < cend) {
240 nt++;
241 r_rmapd[i] = inA->rmap->mapping->indices[i] + 1;
242 }
243 }
244 if (nt != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hmm nt %D n %D",nt,n);
245 ierr = PetscMalloc1(n+1,&auglyrmapd);CHKERRQ(ierr);
246 for (i=0; i<inA->rmap->mapping->n; i++) {
247 if (r_rmapd[i]) {
248 auglyrmapd[(r_rmapd[i]-1)-cstart] = i;
249 }
250 }
251 ierr = PetscFree(r_rmapd);CHKERRQ(ierr);
252 ierr = VecCreateSeq(PETSC_COMM_SELF,n,&auglydd);CHKERRQ(ierr);
253
254 ierr = PetscCalloc1(inA->cmap->N+1,&lindices);CHKERRQ(ierr);
255 for (i=0; i<ina->B->cmap->n; i++) {
256 lindices[garray[i]] = i+1;
257 }
258 no = inA->rmap->mapping->n - nt;
259 ierr = PetscCalloc1(inA->rmap->mapping->n+1,&r_rmapo);CHKERRQ(ierr);
260 nt = 0;
261 for (i=0; i<inA->rmap->mapping->n; i++) {
262 if (lindices[inA->rmap->mapping->indices[i]]) {
263 nt++;
264 r_rmapo[i] = lindices[inA->rmap->mapping->indices[i]];
265 }
266 }
267 if (nt > no) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hmm nt %D no %D",nt,n);
268 ierr = PetscFree(lindices);CHKERRQ(ierr);
269 ierr = PetscMalloc1(nt+1,&auglyrmapo);CHKERRQ(ierr);
270 for (i=0; i<inA->rmap->mapping->n; i++) {
271 if (r_rmapo[i]) {
272 auglyrmapo[(r_rmapo[i]-1)] = i;
273 }
274 }
275 ierr = PetscFree(r_rmapo);CHKERRQ(ierr);
276 ierr = VecCreateSeq(PETSC_COMM_SELF,nt,&auglyoo);CHKERRQ(ierr);
277 PetscFunctionReturn(0);
278}
279
280PetscErrorCode MatMPIAIJDiagonalScaleLocal(Mat A,Vec scale)
281{
282 /* This routine should really be abandoned as it duplicates MatDiagonalScaleLocal */
283 PetscErrorCode ierr;
284
285 PetscFunctionBegin;
286 ierr = PetscTryMethod(A,"MatDiagonalScaleLocal_C",(Mat,Vec),(A,scale));CHKERRQ(ierr);
287 PetscFunctionReturn(0);
288}
289
290PetscErrorCode MatDiagonalScaleLocal_MPIAIJ(Mat A,Vec scale)
291{
292 Mat_MPIAIJ *a = (Mat_MPIAIJ*) A->data; /*access private part of matrix */
293 PetscErrorCode ierr;
294 PetscInt n,i;
295 PetscScalar *d,*o;
296 const PetscScalar *s;
297
298 PetscFunctionBegin;
299 if (!auglyrmapd) {
300 ierr = MatMPIAIJDiagonalScaleLocalSetUp(A,scale);CHKERRQ(ierr);
301 }
302
303 ierr = VecGetArrayRead(scale,&s);CHKERRQ(ierr);
304
305 ierr = VecGetLocalSize(auglydd,&n);CHKERRQ(ierr);
306 ierr = VecGetArray(auglydd,&d);CHKERRQ(ierr);
307 for (i=0; i<n; i++) {
308 d[i] = s[auglyrmapd[i]]; /* copy "diagonal" (true local) portion of scale into dd vector */
309 }
310 ierr = VecRestoreArray(auglydd,&d);CHKERRQ(ierr);
311 /* column scale "diagonal" portion of local matrix */
312 ierr = MatDiagonalScale(a->A,NULL,auglydd);CHKERRQ(ierr);
313
314 ierr = VecGetLocalSize(auglyoo,&n);CHKERRQ(ierr);
315 ierr = VecGetArray(auglyoo,&o);CHKERRQ(ierr);
316 for (i=0; i<n; i++) {
317 o[i] = s[auglyrmapo[i]]; /* copy "off-diagonal" portion of scale into oo vector */
318 }
319 ierr = VecRestoreArrayRead(scale,&s);CHKERRQ(ierr);
320 ierr = VecRestoreArray(auglyoo,&o);CHKERRQ(ierr);
321 /* column scale "off-diagonal" portion of local matrix */
322 ierr = MatDiagonalScale(a->B,NULL,auglyoo);CHKERRQ(ierr);
323 PetscFunctionReturn(0);
324}
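Before the header that contains the flagged expression, it may help to see the column-compaction pattern that the PETSC_USE_CTABLE branch of MatSetUpMultiply_MPIAIJ implements: map each distinct global column of B to a local id, record the map in garray, and rewrite B's column indices. A minimal sketch of that pattern, with a plain array standing in for PetscTable and made-up inputs (aj, nnz, N below are hypothetical) in place of the Mat_SeqAIJ fields:

#include <stdio.h>
#include <stdlib.h>

int main(void)
{
  const int N = 10;                        /* global number of columns (assumed) */
  int aj[]  = {7, 2, 7, 9, 2};             /* global column indices of B's nonzeros */
  int nnz   = 5;
  int *table = calloc(N + 1, sizeof(int)); /* table[gid+1] == lid+1, 0 means absent */
  int ec = 0, lid = 0, garray[5];

  /* first pass: count distinct global columns, assign 1-based local ids
     (the gid1 = col + 1 / PetscTableAdd step in the real code) */
  for (int k = 0; k < nnz; k++) {
    int gid1 = aj[k] + 1;
    if (!table[gid1]) table[gid1] = ++ec;
  }
  /* form garray in ascending global order and renumber the table to match
     (the real code sorts garray and rebuilds the PetscTable) */
  for (int gid1 = 1; gid1 <= N; gid1++)
    if (table[gid1]) { garray[lid] = gid1 - 1; table[gid1] = ++lid; }
  /* second pass: compact B's column indices to the local numbering
     (the PetscTableFind / lid-- step) */
  for (int k = 0; k < nnz; k++) aj[k] = table[aj[k] + 1] - 1;

  for (int k = 0; k < nnz; k++) printf("aj[%d] = %d\n", k, aj[k]);
  for (int l = 0; l < ec;  l++) printf("garray[%d] = %d\n", l, garray[l]);
  free(table);
  return 0;
}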

/sandbox/petsc/petsc.master/include/petscctable.h

1#ifndef __PETSCCTABLE_H
2#define __PETSCCTABLE_H
3#include <petscsys.h>
4
5struct _n_PetscTable {
6 PetscInt *keytable;
7 PetscInt *table;
8 PetscInt count;
9 PetscInt tablesize;
10 PetscInt head;
11 PetscInt maxkey; /* largest key allowed */
12};
13
14typedef struct _n_PetscTable* PetscTable;
15typedef PetscInt* PetscTablePosition;
16
17PETSC_STATIC_INLINE unsigned long PetscHash(PetscTable ta,unsigned long x)
18{
19 return(x%(unsigned long)ta->tablesize);
20}
21
22PETSC_STATIC_INLINE unsigned long PetscHashStep(PetscTable ta,unsigned long x)
23{
24 return(1+(x%(unsigned long)(ta->tablesize-1)));
[22] Division by zero
25}
26
27PETSC_EXTERN PetscErrorCode PetscTableCreate(const PetscInt,PetscInt,PetscTable*);
28PETSC_EXTERN PetscErrorCode PetscTableCreateCopy(const PetscTable,PetscTable*);
29PETSC_EXTERN PetscErrorCode PetscTableDestroy(PetscTable*);
30PETSC_EXTERN PetscErrorCode PetscTableGetCount(const PetscTable,PetscInt*);
31PETSC_EXTERN PetscErrorCode PetscTableIsEmpty(const PetscTable,PetscInt*);
32PETSC_EXTERN PetscErrorCode PetscTableAddExpand(PetscTable,PetscInt,PetscInt,InsertMode);
33PETSC_EXTERN PetscErrorCode PetscTableAddCountExpand(PetscTable,PetscInt);
34PETSC_EXTERN PetscErrorCode PetscTableGetHeadPosition(PetscTable,PetscTablePosition*);
35PETSC_EXTERN PetscErrorCode PetscTableGetNext(PetscTable,PetscTablePosition*,PetscInt*,PetscInt*);
36PETSC_EXTERN PetscErrorCode PetscTableRemoveAll(PetscTable);
37
38PETSC_STATIC_INLINE PetscErrorCode PetscTableAdd(PetscTable ta,PetscInt key,PetscInt data,InsertMode imode)
39{
40 PetscErrorCode ierr;
41 PetscInt i,hash = (PetscInt)PetscHash(ta,(unsigned long)key);
42 PetscInt hashstep = (PetscInt)PetscHashStep(ta,(unsigned long)key);
[21] Calling 'PetscHashStep'
43
44 PetscFunctionBegin;
45 if (key <= 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"key <= 0");
46 if (key > ta->maxkey) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"key %D is greater than largest key allowed %D",key,ta->maxkey);
47 if (!data) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Null data");
48
49 for (i=0; i<ta->tablesize; i++) {
50 if (ta->keytable[hash] == key) {
51 switch (imode) {
52 case INSERT_VALUES:
53 ta->table[hash] = data; /* over write */
54 break;
55 case ADD_VALUES:
56 ta->table[hash] += data;
57 break;
58 case MAX_VALUES:
59 ta->table[hash] = PetscMax(ta->table[hash],data);
60 break;
61 case NOT_SET_VALUES:
62 case INSERT_ALL_VALUES:
63 case ADD_ALL_VALUES:
64 case INSERT_BC_VALUES:
65 case ADD_BC_VALUES:
66 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unsupported InsertMode");
67 }
68 PetscFunctionReturn(0);
69 } else if (!ta->keytable[hash]) {
70 if (ta->count < 5*(ta->tablesize/6) - 1) {
71 ta->count++; /* add */
72 ta->keytable[hash] = key;
73 ta->table[hash] = data;
74 } else {
75 ierr = PetscTableAddExpand(ta,key,data,imode);CHKERRQ(ierr);
76 }
77 PetscFunctionReturn(0);
78 }
79 hash = (hash + hashstep)%ta->tablesize;
80 }
81 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_COR,"Full table");
82 /* PetscFunctionReturn(0); */
83}
84
85PETSC_STATIC_INLINE PetscErrorCode PetscTableAddCount(PetscTable ta,PetscInt key)
86{
87 PetscErrorCode ierr;
88 PetscInt i,hash = (PetscInt)PetscHash(ta,(unsigned long)key);
89 PetscInt hashstep = (PetscInt)PetscHashStep(ta,(unsigned long)key);
90
91 PetscFunctionBegin;
92 if (key <= 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"key <= 0");
93 if (key > ta->maxkey) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"key %D is greater than largest key allowed %D",key,ta->maxkey);
94
95 for (i=0; i<ta->tablesize; i++) {
96 if (ta->keytable[hash] == key) {
97 PetscFunctionReturn(0);
98 } else if (!ta->keytable[hash]) {
99 if (ta->count < 5*(ta->tablesize/6) - 1) {
100 ta->count++; /* add */
101 ta->keytable[hash] = key;
102 ta->table[hash] = ta->count;
103 } else {
104 ierr = PetscTableAddCountExpand(ta,key);CHKERRQ(ierr);
105 }
106 PetscFunctionReturn(0);
107 }
108 hash = (hash + hashstep)%ta->tablesize;
109 }
110 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_COR,"Full table");
111 /* PetscFunctionReturn(0); */
112}
113
114/*
115 PetscTableFind - checks if a key is in the table
116
117 If data==0, then no table entry exists.
118
119*/
120PETSC_STATIC_INLINE PetscErrorCode PetscTableFind(PetscTable ta,PetscInt key,PetscInt *data)
121{
122 PetscInt ii = 0;
123 PetscInt hash = (PetscInt)PetscHash(ta,(unsigned long)key);
124 PetscInt hashstep = (PetscInt)PetscHashStep(ta,(unsigned long)key);
125
126 PetscFunctionBegin;
127 *data = 0;
128 if (key <= 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Key <= 0");
129 if (key > ta->maxkey) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"key %D is greater than largest key allowed %D",key,ta->maxkey);
130
131 while (ii++ < ta->tablesize) {
132 if (!ta->keytable[hash]) break;
133 else if (ta->keytable[hash] == key) {
134 *data = ta->table[hash];
135 break;
136 }
137 hash = (hash + hashstep)%ta->tablesize;
138 }
139 PetscFunctionReturn(0);
140}
141
142#endif
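If table creation can ever produce ta->tablesize == 1 (the assumption the analyzer makes along the path above), the step computation needs the divisor checked before the modulo. A hedged sketch of one possible guard, not the upstream PETSc change, using a hypothetical mirror of the struct rather than _n_PetscTable itself:

#include <assert.h>

/* Hypothetical mirror of the field PetscHashStep reads; illustration only. */
typedef struct { int tablesize; } TableShape;

/* Sketch of a guarded step: refuse a degenerate table instead of computing x % 0. */
static unsigned long guarded_hash_step(const TableShape *ta, unsigned long x)
{
  assert(ta->tablesize > 1); /* the invariant the current code silently relies on */
  return 1 + (x % (unsigned long)(ta->tablesize - 1));
}

int main(void)
{
  TableShape t = { 16 };
  return (int)guarded_hash_step(&t, 123); /* 1 + 123 % 15 */
}

The alternative is to guarantee at creation time that every table has tablesize > 1; either way, the invariant the modulo depends on should be made explicit so the analyzer's assumption is ruled out.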