Actual source code: gvec2d.c
1: #ifdef PETSC_RCS_HEADER
2: static char vcid[] = "$Id: gvec2d.c,v 1.22 2000/10/08 00:27:05 knepley Exp $";
3: #endif
5: /* Implements FE vectors derived from 2d triangular grids */
6: #include "petscsles.h" /* For ALE Operators */
7: #include "src/gvec/gvecimpl.h" /*I "gvec.h" I*/
8: #include "src/mesh/impls/triangular/2d/2dimpl.h"
9: #include "gvec2d.h"
11: int GVecGetLocalGVec_Triangular_2D(GVec g, GVec *gvec) {
12: SETERRQ(PETSC_ERR_SUP, " ");
13: }
15: int GVecRestoreLocalGVec_Triangular_2D(GVec g, GVec *gvec) {
16: SETERRQ(PETSC_ERR_SUP, " ");
17: }
19: int GVecGlobalToLocal_Triangular_2D(GVec g, InsertMode mode, GVec l) {
20: SETERRQ(PETSC_ERR_SUP, " ");
21: }
23: int GVecLocalToGlobal_Triangular_2D(GVec l, InsertMode mode, GVec g) {
24: SETERRQ(PETSC_ERR_SUP, " ");
25: }
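/* Evaluate the pointwise function f at every node owned by this processor (and at ghost
   nodes when v is locally ghosted), writing the components of each active field into v,
   and finally scale the vector by alpha */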
27: int GVecEvaluateFunction_Triangular_2D(Grid grid, GVec v, VarOrdering order, PointFunction f, PetscScalar alpha, void *ctx)
28: {
29: Mesh_Triangular *tri = (Mesh_Triangular *) grid->mesh->data;
30: Partition_Triangular_2D *q = (Partition_Triangular_2D *) grid->mesh->part->data;
31: double *nodes = tri->nodes;
32: int **localStart = order->localStart;
33: FieldClassMap map;
34: int numNodes, numFields;
35: int *fields, **fieldClasses, *classes, *classSizes;
36: int nodeVars;
37: PetscScalar *array;
38: int size, locSize, overlapSize;
39: int fieldIdx, field, node, nclass, count;
40: int ierr;
43: VarOrderingGetClassMap(order, &map);
44: numNodes = map->numNodes;
45: numFields = map->numFields;
46: fields = map->fields;
47: fieldClasses = map->fieldClasses;
48: classes = map->classes;
49: classSizes = map->classSizes;
50: /* Check for a locally ghosted vector */
51: VecGetArray(v, &array);
52: VecGetLocalSize(v, &locSize);
53: /* VecGetGhostSize(v, &overlapSize); */
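/* The ghost count is read directly from the underlying Vec_MPI structure, since the
   accessor commented out above is unavailable */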
54: overlapSize = locSize + ((Vec_MPI *) v->data)->nghost;
55: size = order->numLocVars;
56: #if 0
57:   /* This doesn't work for constrained vectors since it conflicts with the check on count,
58: as variables generated by constraints are not handled here
59: */
60: if (locSize != order->numLocVars)
61: SETERRQ2(PETSC_ERR_ARG_WRONG, "Wrong vector size %d should be %d", locSize, order->numLocVars);
62: #endif
63: if (overlapSize > locSize) {
64: numNodes = q->numOverlapNodes;
65: size = order->numOverlapVars;
66: if (overlapSize != order->numOverlapVars)
67: SETERRQ(PETSC_ERR_ARG_WRONG, "Wrong size for vector");
68: }
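/* Each node contributes classSizes[class] consecutive variables to the vector; within
   that block the variables of a given field begin at offset localStart[field][class] */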
69: for(node = 0, count = 0; node < numNodes; node++, count += nodeVars)
70: {
71: nclass = classes[node];
72: nodeVars = classSizes[nclass];
73: for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++)
74: {
75: field = fields[fieldIdx];
76: if (fieldClasses[fieldIdx][nclass] == 0)
77: continue;
78: (*f)(1, grid->fields[field].numComp, &nodes[node*2], &nodes[node*2+1], PETSC_NULL,
79: &array[count+localStart[field][nclass]], ctx);
80:
81: }
82: }
83: if (count != size)
84: SETERRQ(PETSC_ERR_PLIB, "Invalid variable offset records");
85: VecRestoreArray(v, &array);
86: VecScale(&alpha, v);
87: return(0);
88: }
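/* Evaluate the pointwise function func at every node lying on boundary bd, using the
   grid's boundary iterator, and scale the resulting vector by alpha */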
90: int GVecEvaluateFunctionBoundary_Triangular_2D(Grid grid, GVec v, int bd, VarOrdering order, PointFunction func,
91: PetscScalar alpha, void *ctx)
92: {
93: Mesh mesh = grid->mesh;
94: int **localStart = order->localStart;
95: int *offsets = order->offsets;
96: int *localOffsets = order->localOffsets;
97: int firstVar = order->firstVar[mesh->part->rank];
98: FieldClassMap map;
99: int numNodes, numFields;
100: int *fields, **fieldClasses;
101: PetscScalar *array; /* The local vector values */
102: double x, y, z;
103: int f, field, node, nclass, row;
104: int ierr;
107: VarOrderingGetClassMap(order, &map);
108: numNodes = map->numNodes;
109: numFields = map->numFields;
110: fields = map->fields;
111: fieldClasses = map->fieldClasses;
112: /* Loop over boundary nodes */
113: VecGetArray(v, &array);
114: for(f = 0; f < numFields; f++) {
115: field = fields[f];
116: (*grid->ops->getboundarystart)(grid, bd, f, PETSC_FALSE, map, &node, &nclass);
117: while(node >= 0) {
118: if (node >= numNodes)
119: row = localOffsets[node-numNodes];
120: else
121: row = offsets[node] - firstVar + localStart[field][nclass];
122: MeshGetNodeCoords(mesh, node, &x, &y, &z);
123: if (fieldClasses[f][nclass] != 0) {
124: (*func)(1, grid->fields[field].numComp, &x, &y, &z, &array[row], ctx);
125: }
126: (*grid->ops->getboundarynext)(grid, bd, f, PETSC_FALSE, map, &node, &nclass);
127: #ifdef PETSC_USE_BOPT_g
128: #endif
129: }
130: }
131: VecRestoreArray(v, &array);
132: VecScale(&alpha, v);
133: return(0);
134: }
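/* Identical to GVecEvaluateFunction_Triangular_2D() except that every processor calls f
   the same number of times (maxNodes), passing zero points once its own nodes are
   exhausted, so that f may safely perform collective operations */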
136: int GVecEvaluateFunctionCollective_Triangular_2D(Grid grid, GVec v, VarOrdering order, PointFunction f, PetscScalar alpha,
137: void *ctx)
138: {
139: Mesh_Triangular *tri = (Mesh_Triangular *) grid->mesh->data;
140: double *nodes = tri->nodes;
141: int **localStart = order->localStart;
142: FieldClassMap map;
143: int numNodes, numFields;
144: int *fields, **fieldClasses, *classes, *classSizes;
145: int nodeVars, comp;
146: PetscScalar *array;
147: int maxNodes; /* The most nodes in any domain */
148: int fieldIdx, field, node, nclass, count;
149: int ierr;
152: VarOrderingGetClassMap(order, &map);
153: numNodes = map->numNodes;
154: numFields = map->numFields;
155: fields = map->fields;
156: fieldClasses = map->fieldClasses;
157: classes = map->classes;
158: classSizes = map->classSizes;
159: MPI_Allreduce(&numNodes, &maxNodes, 1, MPI_INT, MPI_MAX, grid->comm);
160: VecGetArray(v, &array);
161: for(node = 0, count = 0; node < maxNodes; node++) {
162: if (node < numNodes) {
163: nclass = classes[node];
164: nodeVars = classSizes[nclass];
165: for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
166: field = fields[fieldIdx];
167: comp = grid->fields[field].numComp;
168: if (fieldClasses[fieldIdx][nclass] == 0) {
169: /* We have to make sure that every processor is available at each iteration */
170: (*f)(0, 0, PETSC_NULL, PETSC_NULL, PETSC_NULL, PETSC_NULL, ctx);
171: continue;
172: }
173: (*f)(1, comp, &nodes[node*2], &nodes[node*2+1], PETSC_NULL, &array[count+localStart[field][nclass]], ctx);
174:
175: }
176: count += nodeVars;
177: } else {
178: /* We have to make sure that every processor is available at each iteration */
179: (*f)(0, 0, PETSC_NULL, PETSC_NULL, PETSC_NULL, PETSC_NULL, ctx);
180: }
181: }
182: if (count != order->numLocVars) {
183: SETERRQ2(PETSC_ERR_PLIB, "Invalid number of variables modified %d should be %d", count, order->numLocVars);
184: }
185: VecRestoreArray(v, &array);
186: VecScale(&alpha, v);
187: return(0);
188: }
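/* Assemble the Galerkin (weak form) contribution of f: on each element the field's
   discretization integrates f against its basis functions and the result is added into v */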
190: int GVecEvaluateFunctionGalerkin_Triangular_2D(Grid grid, GVec v, int numFields, int *fields, LocalVarOrdering locOrder,
191: PointFunction f, PetscScalar alpha, void *ctx)
192: {
193: Mesh mesh = grid->mesh;
194: int numElements = mesh->numFaces;
195: ElementVec vec = grid->vec;
196: int *elemStart = locOrder->elemStart;
197: PetscScalar *array = vec->array;
198: int field, fieldIdx, elem;
199: #ifdef PETSC_USE_BOPT_g
200: PetscTruth opt;
201: #endif
202: int ierr;
205: /* Loop over elements */
206: for(elem = 0; elem < numElements; elem++) {
207: /* Initialize element vector */
208: ElementVecZero(vec);
210: /* Get contribution to the element vector from each discretization */
211: for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
212: field = fields[fieldIdx];
213: ierr = DiscretizationEvaluateFunctionGalerkin(grid->fields[field].disc, mesh, f, alpha, elem, &array[elemStart[field]], ctx);
214:
215: #ifdef PETSC_USE_BOPT_g
216: #endif
217: }
219: /* Setup global row and column indices */
220: GridCalcElementVecIndices(grid, elem, vec);
221: #ifdef PETSC_USE_BOPT_g
222: PetscOptionsHasName(PETSC_NULL, "-trace_vec_assembly", &opt);
223: if (opt == PETSC_TRUE) {
224: int var;
226: for(var = 0; var < vec->reduceSize; var++)
227:         PetscPrintf(PETSC_COMM_SELF, "%2d %4.2g\n", vec->indices[var], PetscRealPart(array[var]));
228: }
229: #endif
230: /* Put values in global vector */
231: ElementVecSetValues(vec, v, ADD_VALUES);
232: }
234: VecAssemblyBegin(v);
235: VecAssemblyEnd(v);
236: return(0);
237: }
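/* Collective form of GVecEvaluateFunctionGalerkin_Triangular_2D(): processors that run out
   of local elements keep making dummy discretization calls so that f may be collective */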
239: int GVecEvaluateFunctionGalerkinCollective_Triangular_2D(Grid grid, GVec v, int numFields, int *fields,
240: LocalVarOrdering locOrder, PointFunction f,
241: PetscScalar alpha, void *ctx)
242: {
243: Mesh mesh = grid->mesh;
244: int numElements = mesh->numFaces;
245: ElementVec vec = grid->vec;
246: int *elemStart = locOrder->elemStart;
247: PetscScalar *array = vec->array;
248: int maxElements;
249: int field, fieldIdx, elem;
250: #ifdef PETSC_USE_BOPT_g
251: PetscTruth opt;
252: #endif
253: int ierr;
256: MPI_Allreduce(&numElements, &maxElements, 1, MPI_INT, MPI_MAX, grid->comm);
257: /* Loop over elements */
258: for(elem = 0; elem < maxElements; elem++) {
259: if (elem < numElements) {
260: /* Initialize element vector */
261: ElementVecZero(vec);
263: /* Get contribution to the element vector from each discretization */
264: for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
265: field = fields[fieldIdx];
266: ierr = DiscretizationEvaluateFunctionGalerkin(grid->fields[field].disc, mesh, f, alpha, elem, &array[elemStart[field]], ctx);
267:
268: #ifdef PETSC_USE_BOPT_g
269: #endif
270: }
272: /* Setup global row and column indices */
273: GridCalcElementVecIndices(grid, elem, vec);
274: #ifdef PETSC_USE_BOPT_g
275: PetscOptionsHasName(PETSC_NULL, "-trace_vec_assembly", &opt);
276: if (opt == PETSC_TRUE) {
277: int var;
279: for(var = 0; var < vec->reduceSize; var++)
280:           PetscPrintf(PETSC_COMM_SELF, "%2d %4.2g\n", vec->indices[var], PetscRealPart(array[var]));
281: }
282: #endif
283: /* Put values in global vector */
284: ElementVecSetValues(vec, v, ADD_VALUES);
285: } else {
286: /* We have to make sure that every processor is available at each call to f */
287: for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
288: field = fields[fieldIdx];
289: ierr = DiscretizationEvaluateFunctionGalerkin(grid->fields[field].disc, mesh, f, 0.0, -1, PETSC_NULL, ctx);
290: }
291: }
292: }
294: VecAssemblyBegin(v);
295: VecAssemblyEnd(v);
296: return(0);
297: }
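/* Assemble the Galerkin contribution of f over the boundary edges owned by this processor,
   using the lower dimensional boundary discretization (bdDisc) of each field */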
299: int GVecEvaluateBoundaryFunctionGalerkin_Triangular_2D(Grid grid, GVec v, int numFields, int *fields,
300: LocalVarOrdering locOrder, PointFunction f, PetscScalar alpha, void *ctx)
301: {
302: Mesh mesh = grid->mesh;
303: Mesh_Triangular *tri = (Mesh_Triangular *) mesh->data;
304: Partition p = mesh->part;
305: Partition_Triangular_2D *q = (Partition_Triangular_2D *) p->data;
306: int elemSize = locOrder->elemSize;
307: int *elemStart = locOrder->elemStart;
308: int numEdges = mesh->numEdges;
309: int *bdEdges = tri->bdEdges;
310: int *firstEdge = q->firstEdge;
311: int rank = p->rank;
312: ElementVec vec; /* The element vector */
313: PetscScalar *array; /* The values in the element vector */
314: EdgeContext bdCtx; /* A context wrapper to communicate the midnode of an edge */
315: int field, edge, midNode;
316: int fieldIdx, bd, bdEdge;
317: #ifdef PETSC_USE_BOPT_g
318: PetscTruth opt;
319: #endif
320: int ierr;
323: /* Setup element vector for the lower dimensional system */
324: ierr = ElementVecCreate(grid->comm, elemSize, &vec);
325: array = vec->array;
327: /* Setup user context */
328: bdCtx.ctx = ctx;
330: /* Our problem here is that "edges" are not data structures like "elements". The element
331: holds the midnodes which appear on it, but edges do not. Thus we must pass the midnode
332: number to the discretization, which we do using a context wrapper. Unfortunately, the
333:      row indices were derived from elements, so we must introduce another numbering function
334: which operates on nodes alone. The midnode number is found by a search of the elements
335: which could certainly be improved with geometric hints. We might also assume that it
336: is the node lying between the two endpoints in the bdNodes[] array. In addition, the
337:      boundary variable ordering is given in terms of boundary node numbers, so the node
338:      number must be converted before calling the numbering function. This could be sped up
339: by placing boundary node numbers in the bdEdges[] array instead. */
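/* A minimal sketch of the context wrapper used below (assumed to be declared in gvec2d.h);
   only the two members referenced in this file are shown:

     typedef struct {
       int   midnode;   the midnode lying on the current edge
       void *ctx;       the user's original context, passed through unchanged
     } EdgeContext;
*/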
341: /* Loop over boundary edges */
342: for(bd = 0, bdEdge = 0; bd < grid->numBd; bd++) {
343: for(bdEdge = tri->bdEdgeBegin[bd]; bdEdge < tri->bdEdgeBegin[bd+1]; bdEdge++) {
344: /* Check that edge is on this processor */
345: edge = bdEdges[bdEdge] - firstEdge[rank];
346:       if ((edge < 0) || (edge >= numEdges))
347: continue;
349: /* Search for midnode on edge */
350: midNode = -1;
351: MeshGetMidnodeFromEdge(mesh, edge, &midNode);
352: bdCtx.midnode = midNode;
354:       /* Initialize element vector */
355: ElementVecZero(vec);
357: for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
358: field = fields[fieldIdx];
359: DiscretizationEvaluateFunctionGalerkin(grid->fields[field].disc->bdDisc, mesh, f, alpha, edge, &array[elemStart[field]], &bdCtx);
360:
361: #ifdef PETSC_USE_BOPT_g
362: #endif
363: }
365: /* Setup global row and column indices */
366: GridCalcBoundaryElementVecIndices(grid, bd, edge, midNode, grid->bdOrder, PETSC_FALSE, vec);
367: #ifdef PETSC_USE_BOPT_g
368: PetscOptionsHasName(PETSC_NULL, "-trace_vec_bd_assembly", &opt);
369: if (opt == PETSC_TRUE) {
370: int var;
372: for(var = 0; var < vec->reduceSize; var++)
373:           PetscPrintf(PETSC_COMM_SELF, "%2d %4.2g\n", vec->indices[var], PetscRealPart(array[var]));
374: }
375: #endif
376: /* Put values in global vector */
377: ElementVecSetValues(vec, v, ADD_VALUES);
378: }
379: }
380: #ifdef PETSC_USE_BOPT_g
381: if (bdEdge != mesh->numBdEdges) SETERRQ(PETSC_ERR_PLIB, "Invalid boundary edge numbering");
382: #endif
384: VecAssemblyBegin(v);
385: VecAssemblyEnd(v);
387: /* Cleanup */
388: ElementVecDestroy(vec);
390: return(0);
391: }
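/* Collective form of GVecEvaluateBoundaryFunctionGalerkin_Triangular_2D(): boundary edges
   not owned by this processor still trigger dummy discretization calls so that f may be
   collective */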
393: int GVecEvaluateBoundaryFunctionGalerkinCollective_Triangular_2D(Grid grid, GVec v, int numFields, int *fields,
394: LocalVarOrdering locOrder, PointFunction f,
395: PetscScalar alpha, void *ctx)
396: {
397: Mesh mesh = grid->mesh;
398: Mesh_Triangular *tri = (Mesh_Triangular *) mesh->data;
399: Partition p = mesh->part;
400: Partition_Triangular_2D *q = (Partition_Triangular_2D *) p->data;
401: int elemSize = locOrder->elemSize;
402: int *elemStart = locOrder->elemStart;
403: int numEdges = mesh->numEdges;
404: int *bdEdges = tri->bdEdges;
405: int *firstEdge = q->firstEdge;
406: int rank = p->rank;
407: ElementVec vec; /* The element vector */
408: PetscScalar *array; /* The values in the element vector */
409: EdgeContext bdCtx; /* A context wrapper to communicate the midnode of an edge */
410: int field, edge, midnode;
411: int fieldIdx, bd, bdEdge;
412: #ifdef PETSC_USE_BOPT_g
413: PetscTruth opt;
414: #endif
415: int ierr;
418: /* Setup element vector for the lower dimensional system */
419: ierr = ElementVecCreate(grid->comm, elemSize, &vec);
420: array = vec->array;
422: /* Setup user context */
423: bdCtx.ctx = ctx;
425: /* Our problem here is that "edges" are not data structures like "elements". The element
426: holds the midnodes which appear on it, but edges do not. Thus we must pass the midnode
427: number to the discretization, which we do using a context wrapper. Unfortunately, the
428:      row indices were derived from elements, so we must introduce another numbering function
429: which operates on nodes alone. The midnode number is found by a search of the elements
430: which could certainly be improved with geometric hints. We might also assume that it
431: is the node lying between the two endpoints in the bdNodes[] array. In addition, the
432:      boundary variable ordering is given in terms of boundary node numbers, so the node
433:      number must be converted before calling the numbering function. This could be sped up
434: by placing boundary node numbers in the bdEdges[] array instead. */
436: /* Loop over boundary edges */
437: for(bd = 0, bdEdge = 0; bd < grid->numBd; bd++) {
438: for(bdEdge = tri->bdEdgeBegin[bd]; bdEdge < tri->bdEdgeBegin[bd+1]; bdEdge++) {
439: /* Check that edge is on this processor */
440: edge = bdEdges[bdEdge] - firstEdge[rank];
441:       if ((edge < 0) || (edge >= numEdges)) {
442: for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
443: field = fields[fieldIdx];
444: ierr = DiscretizationEvaluateFunctionGalerkin(grid->fields[field].disc->bdDisc, mesh, f, 0.0, -1, PETSC_NULL, &bdCtx);
445:
446: }
447: continue;
448: }
450: /* Locate midnode on edge */
451: midnode = -1;
452: MeshGetMidnodeFromEdge(mesh, edge, &midnode);
453: bdCtx.midnode = midnode;
454: #ifdef PETSC_USE_BOPT_g
455: if (tri->markers[midnode] != tri->bdMarkers[bd])
456: SETERRQ4(PETSC_ERR_ARG_WRONG, "Invalid midnode %d has marker %d on boundary %d (%d)",
457: midnode, tri->markers[midnode], bd, tri->bdMarkers[bd]);
458: #endif
460:       /* Initialize element vector */
461: ElementVecZero(vec);
463: for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++) {
464: field = fields[fieldIdx];
465: DiscretizationEvaluateFunctionGalerkin(grid->fields[field].disc->bdDisc, mesh, f, alpha, edge, &array[elemStart[field]], &bdCtx);
466:
467: #ifdef PETSC_USE_BOPT_g
468: #endif
469: }
471: /* Setup global row and column indices */
472: GridCalcBoundaryElementVecIndices(grid, bd, edge, midnode, grid->bdOrder, PETSC_FALSE, vec);
473: #ifdef PETSC_USE_BOPT_g
474: PetscOptionsHasName(PETSC_NULL, "-trace_vec_bd_assembly", &opt);
475: if (opt == PETSC_TRUE) {
476: int var;
478: for(var = 0; var < vec->reduceSize; var++)
479:           PetscPrintf(PETSC_COMM_SELF, "%2d %4.2g\n", vec->indices[var], PetscRealPart(array[var]));
480: }
481: #endif
482: /* Put values in global vector */
483: ElementVecSetValues(vec, v, ADD_VALUES);
484: }
485: }
486: #ifdef PETSC_USE_BOPT_g
487: if (bdEdge != mesh->numBdEdges) SETERRQ(PETSC_ERR_PLIB, "Invalid boundary edge numbering");
488: #endif
490: VecAssemblyBegin(v);
491: VecAssemblyEnd(v);
493: /* Cleanup */
494: ElementVecDestroy(vec);
496: return(0);
497: }
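/* Assemble alpha times the Galerkin evaluation of the nonlinear operator op, whose two
   arguments are the element values of x and y; when ALE is active the interpolated mesh
   velocity is supplied to the ALE form of the operator */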
499: int GVecEvaluateNonlinearOperatorGalerkin_Triangular_2D(Grid grid, GVec v, GVec x, GVec y, int numFields, int *fields,
500: LocalVarOrdering locOrder, NonlinearOperator op, PetscScalar alpha,
501: PetscTruth isALE, void *ctx)
502: {
503: Mesh mesh = grid->mesh;
504: int numElements = mesh->numFaces;
505: int *elemStart = locOrder->elemStart;
506: ElementVec vec = grid->vec;
507: PetscScalar *array = vec->array;
508: ElementVec ghostVec = grid->ghostElementVec; /* The local solution vector */
509: PetscScalar *ghostArray = ghostVec->array; /* The values in the ghost element vector */
510: PetscTruth reduceElement = grid->reduceElementArgs;
511: MeshMover mover;
512: Grid ALEGrid; /* The grid describing the mesh velocity */
513: Vec appVec; /* The local vec for y */
514: ElementVec elemAppVec; /* The element vec for y */
515: PetscScalar *appArray; /* The values in elemAppVec */
516: ElementVec MeshALEVec; /* ALE velocity vector with mesh discretization */
517: ElementVec ALEVec; /* ALE velocity vector */
518: PetscScalar *ALEArray; /* The values in the ALE element vector */
519: PetscScalar *nonlinearArgs[2];
520: PetscTruth ALEActive;
521: int field, fieldIdx, elem;
522: #ifdef PETSC_USE_BOPT_g
523: PetscTruth opt;
524: #endif
525: int ierr;
528: MeshGetMover(mesh, &mover);
529: MeshMoverGetVelocityGrid(mover, &ALEGrid);
530: if (grid->ALEActive && (isALE == PETSC_TRUE)) {
531: ALEActive = PETSC_TRUE;
532: } else {
533: ALEActive = PETSC_FALSE;
534: }
535: /* Fill the local solution vectors */
536: if (x != PETSC_NULL) {
537: GridGlobalToLocal(grid, INSERT_VALUES, x);
538: }
539: VecDuplicate(grid->ghostVec, &appVec);
540: ElementVecDuplicate(ghostVec, &elemAppVec);
541: if (y != PETSC_NULL) {
542: GridGlobalToLocalGeneral(grid, y, appVec, INSERT_VALUES, grid->ghostScatter);
543: }
544: appArray = elemAppVec->array;
546: /* Setup ALE variables */
547: if (ALEActive == PETSC_TRUE) {
548: /* Notice that the ALEArray is from this grid, not the mesh velocity grid */
549: MeshALEVec = ALEGrid->vec;
550: ierr = ElementVecDuplicate(grid->vec, &ALEVec);
551: ALEArray = ALEVec->array;
552: } else {
553: MeshALEVec = PETSC_NULL;
554: ALEArray = PETSC_NULL;
555: }
557: /* Loop over elements */
558: for(elem = 0; elem < numElements; elem++)
559: {
560: /* Initialize element vector */
561: ElementVecZero(vec);
563: /* Setup local row and column indices */
564: GridCalcLocalElementVecIndices(grid, elem, ghostVec);
565: ElementVecDuplicateIndices(ghostVec, elemAppVec);
567: /* Setup local solution vector */
568: GridLocalToElement(grid, ghostVec);
569: GridLocalToElementGeneral(grid, appVec, grid->bdReduceVecCur, grid->reduceSystem, reduceElement, elemAppVec);
571: /* Setup ALE variables */
572: if (ALEActive == PETSC_TRUE) {
573: GridCalcLocalElementVecIndices(ALEGrid, elem, MeshALEVec);
574: GridLocalToElement(ALEGrid, MeshALEVec);
575: }
577: /* Get contribution to the element vector from each discretization */
578: for(fieldIdx = 0; fieldIdx < numFields; fieldIdx++)
579: {
580: field = fields[fieldIdx];
581: nonlinearArgs[0] = &ghostArray[elemStart[field]];
582: nonlinearArgs[1] = &appArray[elemStart[field]];
583: if (ALEActive == PETSC_TRUE)
584: {
585: GridInterpolateElementVec(ALEGrid, 0, MeshALEVec, grid, field, ALEVec);
586: DiscretizationEvaluateNonlinearALEOperatorGalerkin(grid->fields[field].disc, mesh, op, alpha, elem, 2, nonlinearArgs,
587: ALEArray, &array[elemStart[field]], ctx);
588:
589: }
590: else
591: {
592: DiscretizationEvaluateNonlinearOperatorGalerkin(grid->fields[field].disc, mesh, op, alpha, elem, 2, nonlinearArgs,
593: &array[elemStart[field]], ctx);
594:
595: }
596: #ifdef PETSC_USE_BOPT_g
597: #endif
598: }
600: /* Setup global row and column indices */
601: GridCalcElementVecIndices(grid, elem, vec);
602: #ifdef PETSC_USE_BOPT_g
603: PetscOptionsHasName(PETSC_NULL, "-trace_vec_assembly", &opt);
604: if (opt == PETSC_TRUE) {
605: int var;
607: for(var = 0; var < vec->reduceSize; var++)
608:         PetscPrintf(PETSC_COMM_SELF, "%2d %4.2g\n", vec->indices[var], PetscRealPart(array[var]));
609: }
610: #endif
611: /* Put values in global vector */
612: ElementVecSetValues(vec, v, ADD_VALUES);
613: }
615: /* Cleanup ALE variables */
616: if (ALEActive == PETSC_TRUE) {
617: ElementVecDestroy(ALEVec);
618: }
619: /* Cleanup additional input vectors */
620: VecDestroy(appVec);
621: ElementVecDestroy(elemAppVec);
622: VecAssemblyBegin(v);
623: VecAssemblyEnd(v);
624: return(0);
625: }
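/* Apply the linear operator op in matrix-free fashion: for each element the operator's
   element matrix is formed in the matArray workspace and applied to the element values of
   y, with x providing the current solution values, and alpha times the result is assembled
   into v */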
627: int GVecEvaluateOperatorGalerkin_Triangular_2D(Grid grid, GVec v, GVec x, GVec y, VarOrdering sOrder, LocalVarOrdering sLocOrder,
628: VarOrdering tOrder, LocalVarOrdering tLocOrder, int op, PetscScalar alpha, void *ctx)
629: {
630: Mesh mesh = grid->mesh;
631: PetscTruth reduceSystem = grid->reduceSystem;
632: PetscTruth reduceElement = grid->reduceElement;
633: int sElemSize = sLocOrder->elemSize;
634: int *sElemStart = sLocOrder->elemStart;
635: int tElemSize = tLocOrder->elemSize;
636: int *tElemStart = tLocOrder->elemStart;
637: FieldClassMap sMap, tMap;
638: int numSFields, numTFields;
639: int *sFields, *tFields;
640: PetscTruth sConstrained, tConstrained;
641: Vec ghostVec; /* The local ghost vector for x (usually the solution) */
642: VecScatter ghostScatter; /* The scatter from x to ghostVec */
643: Vec appVec; /* The local ghost vector for y (usually the application vector) */
644: VecScatter appScatter; /* The scatter from y to appVec */
645: ElementMat mat;
646: ElementVec elemGhostVec, elemAppVec, vec;
647: PetscScalar *ghostArray, *appArray, *matArray, *array;
648: int numElements;
649: int f, sField, tField, elem;
650: #ifdef PETSC_USE_BOPT_g
651: PetscTruth opt;
652: #endif
653: int ierr;
656: MeshGetInfo(mesh, PETSC_NULL, PETSC_NULL, PETSC_NULL, &numElements);
657: VarOrderingGetClassMap(sOrder, &sMap);
658: VarOrderingGetClassMap(tOrder, &tMap);
659: numSFields = sMap->numFields;
660: sFields = sMap->fields;
661: sConstrained = sMap->isConstrained;
662: numTFields = tMap->numFields;
663: tFields = tMap->fields;
664: tConstrained = tMap->isConstrained;
665: /* Setup reduction */
666: (*grid->ops->gridsetupghostscatter)(grid, tOrder, &ghostVec, &ghostScatter);
667: (*grid->ops->gridsetupghostscatter)(grid, sOrder, &appVec, &appScatter);
668: /* Setup element vector and matrix */
669: if (tConstrained == PETSC_TRUE) {
670: for(f = 0; f < numTFields; f++) {
671: if (grid->fields[tFields[f]].isConstrained == PETSC_TRUE)
672: tElemSize += grid->fields[tFields[f]].disc->funcs*grid->fields[tFields[f]].constraintCompDiff;
673: }
674: }
675: if (sConstrained == PETSC_TRUE) {
676: for(f = 0; f < numSFields; f++) {
677: if (grid->fields[sFields[f]].isConstrained == PETSC_TRUE)
678: sElemSize += grid->fields[sFields[f]].disc->funcs*grid->fields[sFields[f]].constraintCompDiff;
679: }
680: }
681: ierr = ElementVecCreate(grid->comm, tElemSize, &vec);
682: array = vec->array;
683: ierr = ElementVecDuplicate(vec, &elemGhostVec);
684: ghostArray = elemGhostVec->array;
685: ierr = ElementVecCreate(grid->comm, sElemSize, &elemAppVec);
686: appArray = elemAppVec->array;
687: ierr = ElementMatCreate(grid->comm, tElemSize, sElemSize, &mat);
688: matArray = mat->array;
690: /* Fill the local solution vectors */
691: GridGlobalToLocalGeneral(grid, x, ghostVec, INSERT_VALUES, ghostScatter);
692: GridGlobalToLocalGeneral(grid, y, appVec, INSERT_VALUES, appScatter);
694: /* Setup the operator with information about the test function space */
695: for(f = 0; f < numSFields; f++) {
696: grid->fields[sFields[f]].disc->operators[op]->test = grid->fields[tFields[f]].disc;
697: }
699: /* Loop over elements */
700: for(elem = 0; elem < numElements; elem++) {
701: /* Initialize element vector */
702: ElementVecZero(vec);
703: vec->reduceSize = tLocOrder->elemSize;
704: elemGhostVec->reduceSize = tLocOrder->elemSize;
705: elemAppVec->reduceSize = sLocOrder->elemSize;
707: /* Setup local row indices */
708: GridCalcGeneralElementVecIndices(grid, elem, tOrder, PETSC_NULL, PETSC_TRUE, elemGhostVec);
709: GridCalcGeneralElementVecIndices(grid, elem, sOrder, PETSC_NULL, PETSC_TRUE, elemAppVec);
710: /* Setup local vectors */
711: GridLocalToElementGeneral(grid, ghostVec, grid->bdReduceVecCur, reduceSystem, reduceElement, elemGhostVec);
712: GridLocalToElementGeneral(grid, appVec, grid->bdReduceVecCur, reduceSystem, reduceElement, elemAppVec);
713: /* Must transform to unconstrained variables for element integrals */
714: GridProjectElementVec(grid, mesh, elem, tOrder, PETSC_FALSE, elemGhostVec);
715: GridProjectElementVec(grid, mesh, elem, sOrder, PETSC_FALSE, elemAppVec);
717: for(f = 0; f < numSFields; f++) {
718: sField = sFields[f];
719: tField = tFields[f];
720: /* Get contribution to the element vector from the linear operator */
721: ElementMatZero(mat);
722: DiscretizationEvaluateOperatorGalerkinMF(grid->fields[sField].disc, mesh, sElemSize, tElemStart[tField], sElemStart[sField],
723: op, alpha, elem, &ghostArray[sElemStart[sField]],
724: &appArray[sElemStart[sField]], array, matArray, ctx);
725:
726: #ifdef PETSC_USE_BOPT_g
727: #endif
728: }
730: /* Setup global row indices, with reduction if necessary */
731: GridCalcGeneralElementVecIndices(grid, elem, tOrder, PETSC_NULL, PETSC_FALSE, vec);
732: #ifdef PETSC_USE_BOPT_g
733: PetscOptionsHasName(PETSC_NULL, "-trace_vec_assembly", &opt);
734: if (opt == PETSC_TRUE) {
735: int var;
737: for(var = 0; var < vec->reduceSize; var++)
738:         PetscPrintf(PETSC_COMM_SELF, "%2d %4.2g\n", vec->indices[var], PetscRealPart(array[var]));
739: }
740: #endif
741: /* Put values in global vector */
742: ElementVecSetValues(vec, v, ADD_VALUES);
743: }
745: VecDestroy(ghostVec);
746: VecScatterDestroy(ghostScatter);
747: VecDestroy(appVec);
748: VecScatterDestroy(appScatter);
749: ElementVecDestroy(elemGhostVec);
750: ElementVecDestroy(elemAppVec);
751: ElementVecDestroy(vec);
752: ElementMatDestroy(mat);
753: VecAssemblyBegin(v);
754: VecAssemblyEnd(v);
755: return(0);
756: }
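/* Matrix-free action of the full system matrix: every operator registered in grid->matOps
   is evaluated element by element and applied to y, and the result is assembled into f;
   when explicit constraints are active, the self-interaction of the new constraint fields
   is added at the end */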
758: int GVecEvaluateSystemMatrix_Triangular_2D(Grid grid, GVec x, GVec y, GVec f, void *ctx)
759: {
760: Mesh mesh = grid->mesh;
761: int numElements = mesh->numFaces;
762: int numMatOps = grid->numMatOps; /* The number of operators in the matrix */
763: GridOp *matOps = grid->matOps; /* The operators in the system matrix */
764: PetscTruth reduceSystem = grid->reduceSystem;
765: PetscTruth reduceElement = grid->reduceElement;
766: PetscTruth explicitConstraints = grid->explicitConstraints;
767: PetscConstraintObject constCtx = grid->constraintCtx; /* The constraint object */
768: int numNewFields = grid->numNewFields; /* The number of new fields added by constraints */
769: ElementVec vec = grid->vec; /* The element vector */
770: PetscScalar *array = vec->array; /* The values in the element vector */
771: ElementMat mat = grid->mat; /* The element matrix */
772: PetscScalar *matArray = mat->array; /* The values in the element matrix */
773: Vec ghostVec = grid->ghostVec; /* The local solution vector */
774: ElementVec elemGhostVec = grid->ghostElementVec; /* Local solution vector */
775: PetscScalar *ghostArray = elemGhostVec->array; /* The values in the ghost element vector */
776: int numFields = grid->cm->numFields; /* The number of fields in the calculation */
777: int *fields = grid->cm->fields; /* The fields participating in the calculation */
778: LocalVarOrdering locOrder = grid->locOrder; /* The default local variable ordering */
779: int elemSize = locOrder->elemSize; /* The number of shape functions in the element matrix */
780: int *elemStart = locOrder->elemStart; /* The offset of each field in the element matrix */
781: int rank = mesh->part->rank; /* The processor rank */
782: MeshMover mover;
783: Grid ALEGrid; /* The grid describing the mesh velocity */
784: VarOrdering order; /* The default variable ordering */
785: ElementVec MeshALEVec; /* ALE velocity vector with mesh discretization */
786: ElementVec ALEVec; /* ALE velocity vector */
787: PetscScalar *ALEArray; /* The values in the ALE element vector */
788: Vec appVec; /* The local vec for y */
789: ElementVec elemAppVec; /* The element vec for y */
790: PetscScalar *appArray; /* The values in elemAppVec */
791: int elem, fieldIndex;
792: int newComp = 0;
793: int sField, tField, op, newField, row, col;
794: #ifdef PETSC_USE_BOPT_g
795: PetscTruth opt;
796: #endif
797: int ierr;
800: MeshGetMover(mesh, &mover);
801: MeshMoverGetVelocityGrid(mover, &ALEGrid);
802: PetscObjectQuery((PetscObject) x, "Order", (PetscObject *) &order);
803: /* Right now, we ignore the preconditioner */
804: /* Fill the local solution vectors */
805: if (x != PETSC_NULL) {
806: GridGlobalToLocal(grid, INSERT_VALUES, x);
807: }
808: VecDuplicate(ghostVec, &appVec);
809: GridGlobalToLocalGeneral(grid, y, appVec, INSERT_VALUES, grid->ghostScatter);
810: ElementVecDuplicate(elemGhostVec, &elemAppVec);
811: appArray = elemAppVec->array;
813: /* Setup ALE variables */
814: if (grid->ALEActive == PETSC_TRUE) {
815: /* Notice that the ALEArray is from this grid, not the mesh velocity grid */
816: MeshALEVec = ALEGrid->vec;
817: ierr = ElementVecDuplicate(grid->vec, &ALEVec);
818: ALEArray = ALEVec->array;
819: } else {
820: MeshALEVec = PETSC_NULL;
821: ALEArray = PETSC_NULL;
822: }
824: /* Loop over elements */
825: for(elem = 0; elem < numElements; elem++)
826: {
827: /* Initialize element vector */
828: ElementVecZero(vec);
829: vec->reduceSize = locOrder->elemSize;
830: elemGhostVec->reduceSize = locOrder->elemSize;
831: elemAppVec->reduceSize = locOrder->elemSize;
833:     /* Setup local row indices */
834: GridCalcLocalElementVecIndices(grid, elem, elemGhostVec);
835: ElementVecDuplicateIndices(elemGhostVec, elemAppVec);
836: elemAppVec->reduceSize = elemGhostVec->reduceSize;
838: /* Setup local solution vector */
839: GridLocalToElementGeneral(grid, ghostVec, grid->bdReduceVecCur, reduceSystem, reduceElement, elemGhostVec);
840: GridLocalToElementGeneral(grid, appVec, grid->bdReduceVecCur, reduceSystem, reduceElement, elemAppVec);
842: /* Must transform to unconstrained variables for element integrals */
843: GridProjectElementVec(grid, mesh, elem, order, PETSC_FALSE, elemGhostVec);
844: GridProjectElementVec(grid, mesh, elem, order, PETSC_FALSE, elemAppVec);
846: /* Setup ALE variables */
847: if (grid->ALEActive == PETSC_TRUE) {
848: GridCalcLocalElementVecIndices(ALEGrid, elem, MeshALEVec);
849: GridLocalToElement(ALEGrid, MeshALEVec);
850: }
852: /* Calculate the contribution to the element matrix from each field */
853: for(op = 0; op < numMatOps; op++) {
854: sField = matOps[op].field;
855: tField = grid->fields[sField].disc->operators[matOps[op].op]->test->field;
856: if (grid->fields[sField].isActive) {
857: ElementMatZero(mat);
858: if (matOps[op].isALE) {
859: GridInterpolateElementVec(ALEGrid, 0, MeshALEVec, grid, sField, ALEVec);
860: DiscretizationEvaluateALEOperatorGalerkinMF(grid->fields[sField].disc, mesh, elemSize, elemStart[tField], elemStart[sField],
861: matOps[op].op, matOps[op].alpha, elem, &ghostArray[elemStart[sField]],
862: &appArray[elemStart[sField]], ALEArray, array, matArray, ctx);
863:
864: } else {
865: DiscretizationEvaluateOperatorGalerkinMF(grid->fields[sField].disc, mesh, elemSize, elemStart[tField], elemStart[sField],
866: matOps[op].op, matOps[op].alpha, elem, &ghostArray[elemStart[sField]],
867: &appArray[elemStart[sField]], array, matArray, ctx);
868:
869: }
870: #ifdef PETSC_USE_BOPT_g
871: #endif
872: }
873: }
875: /* Setup global numbering, with reduction if necessary */
876: GridCalcGeneralElementVecIndices(grid, elem, order, PETSC_NULL, PETSC_FALSE, vec);
877: #ifdef PETSC_USE_BOPT_g
878: PetscOptionsHasName(PETSC_NULL, "-trace_vec_assembly", &opt);
879: if (opt == PETSC_TRUE) {
880: int var;
881: for(var = 0; var < vec->reduceSize; var++)
882:         PetscPrintf(PETSC_COMM_SELF, "%2d %4.2g\n", vec->indices[var], PetscRealPart(array[var]));
883: }
884: #endif
885: /* Put values in global vector */
886: ElementVecSetValues(vec, f, ADD_VALUES);
887: }
889: /* Evaluate self-interaction of new fields created by constraints */
890: if (explicitConstraints == PETSC_TRUE) {
891:     /* WARNING: This only accommodates 1 constrained field */
892: /* Get constraint information */
893: for(fieldIndex = 0; fieldIndex < numFields; fieldIndex++) {
894: sField = fields[fieldIndex];
895: if (grid->fields[sField].isConstrained == PETSC_TRUE) {
896: newComp = grid->fields[sField].numComp + grid->fields[sField].constraintCompDiff;
897: break;
898: }
899: }
900: /* Calculate self-interaction */
901: for(newField = 0; newField < numNewFields; newField++) {
902: /* Initialize element matrix and vector */
903: ElementMatZero(mat);
904: ElementVecZero(vec);
905: mat->reduceRowSize = newComp;
906: mat->reduceColSize = newComp;
907: elemAppVec->reduceSize = newComp;
908: vec->reduceSize = newComp;
910: /* Calculate the indices and contribution to the element matrix from the new field */
911: (*constCtx->ops->newelemmat)(constCtx, order, newField, mat);
912: #ifdef PETSC_USE_BOPT_g
913: PetscOptionsHasName(PETSC_NULL, "-trace_mat_assembly_constrained", &opt);
914: if (opt == PETSC_TRUE) {
915: ElementMatView(mat, PETSC_VIEWER_STDOUT_(mat->comm));
916: }
917: #endif
918: /* Global vector indices are the same as the matrix indices */
919: for(row = 0; row < mat->reduceRowSize; row++) {
920: vec->indices[row] = mat->rowIndices[row];
921: }
922: /* Local vector indices can be calculated directly from the field number */
923: elemAppVec->indices[0] = grid->constraintOrder->firstVar[rank+1] - (numNewFields - newField)*newComp;
924: for(row = 1; row < elemAppVec->reduceSize; row++) {
925: elemAppVec->indices[row] = elemAppVec->indices[row-1]+1;
926: }
928:       /* Retrieve element vector values from the application vector y */
928: GridLocalToElementGeneral(grid, appVec, grid->bdReduceVecCur, reduceSystem, reduceElement, elemAppVec);
929: /* Multiply element matrix and element vector */
930: for(row = 0; row < mat->reduceRowSize; row++) {
931: for(col = 0; col < mat->reduceColSize; col++) {
932: vec->array[row] += mat->array[row*mat->reduceColSize+col]*elemAppVec->array[col];
933: }
934: }
935: PetscLogFlops(2*mat->reduceRowSize*mat->reduceColSize);
936: /* Put values in global vector */
937: ElementVecSetValues(vec, f, ADD_VALUES);
938: #ifdef PETSC_USE_BOPT_g
939: #endif
940: }
941: }
943: /* Reset element vector */
944: elemGhostVec->reduceSize = locOrder->elemSize;
946: VecDestroy(appVec);
947: ElementVecDestroy(elemAppVec);
948: if (grid->ALEActive == PETSC_TRUE) {
949: ElementVecDestroy(ALEVec);
950: }
951: VecAssemblyBegin(f);
952: VecAssemblyEnd(f);
953: return(0);
954: }
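/* Assemble only the diagonal of the system matrix into the vector d, by forming each
   element matrix and adding its diagonal entries with ElementMatSetDiagonalValues() */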
956: int GVecEvaluateSystemMatrixDiagonal_Triangular_2D(Grid grid, GVec x, GVec d, void *ctx)
957: {
958: Mesh mesh = grid->mesh;
959: int numNewFields = grid->numNewFields; /* The number of new fields added by constraints */
960: int numMatOps = grid->numMatOps; /* The number of operators in the matrix */
961: GridOp *matOps = grid->matOps; /* The operators in the system matrix */
962: VarOrdering constOrder = grid->constraintOrder; /* The constrained variable ordering */
963: PetscTruth reduceSystem = grid->reduceSystem;
964: PetscTruth reduceElement = grid->reduceElement;
965: PetscTruth expConst = grid->explicitConstraints;
966: PetscConstraintObject constCtx = grid->constraintCtx; /* The constraint object */
967: int numFields = grid->cm->numFields; /* The number of fields in the calculation */
968: int *fields = grid->cm->fields; /* The fields participating in the calculation */
969: LocalVarOrdering locOrder = grid->locOrder; /* The default local variable ordering */
970: int elemSize = locOrder->elemSize; /* The number of shape functions in the element matrix */
971: int *elemStart = locOrder->elemStart; /* The offset of each field in the element matrix */
972: ElementMat mat = grid->mat; /* The element matrix */
973: PetscScalar *array = mat->array; /* The values in the element matrix */
974: Vec ghostVec = grid->ghostVec; /* The local solution vector */
975: ElementVec elemGhostVec = grid->ghostElementVec; /* The element vector from ghostVec */
976: PetscScalar *ghostArray = elemGhostVec->array; /* The values in elemGhostVec */
977: MeshMover mover;
978: Grid ALEGrid; /* The grid describing the mesh velocity */
979: VarOrdering order; /* The default variable ordering */
980: ElementVec MeshALEVec; /* ALE velocity vector with mesh discretization */
981: ElementVec ALEVec; /* ALE velocity vector */
982: PetscScalar *ALEArray; /* The values in the ALE element vector */
983: int newComp = 0;
984: int numElements;
985: int elem, f, sField, tField, op, newField;
986: #ifdef PETSC_USE_BOPT_g
987: PetscTruth opt;
988: #endif
989: int ierr;
992: MeshGetMover(mesh, &mover);
993: MeshMoverGetVelocityGrid(mover, &ALEGrid);
994: MeshGetInfo(mesh, PETSC_NULL, PETSC_NULL, PETSC_NULL, &numElements);
995: if (expConst == PETSC_TRUE) {
996: order = grid->constraintOrder;
997: } else {
998: order = grid->order;
999: }
1000: /* Fill the local solution vectors */
1001: if (x != PETSC_NULL) {
1002: GridGlobalToLocal(grid, INSERT_VALUES, x);
1003: }
1005: /* Setup ALE variables -- No new variables should be ALE so ALEVec is not recalculated */
1006: if (grid->ALEActive == PETSC_TRUE) {
1007: /* Notice that the ALEArray is from this grid, not the mesh velocity grid */
1008: MeshALEVec = ALEGrid->vec;
1009: ierr = ElementVecDuplicate(grid->vec, &ALEVec);
1010: ALEArray = ALEVec->array;
1011: } else {
1012: MeshALEVec = PETSC_NULL;
1013: ALEArray = PETSC_NULL;
1014: }
1016: /* Loop over elements */
1017: for(elem = 0; elem < numElements; elem++) {
1018: /* Initialize element matrix */
1019: ElementMatZero(mat);
1020: mat->reduceRowSize = locOrder->elemSize;
1021: mat->reduceColSize = locOrder->elemSize;
1022: elemGhostVec->reduceSize = locOrder->elemSize;
1024: /* Setup local row indices for the ghost vector */
1025: GridCalcLocalElementVecIndices(grid, elem, elemGhostVec);
1026: /* Setup local solution vector */
1027: GridLocalToElementGeneral(grid, ghostVec, grid->bdReduceVecCur, reduceSystem, reduceElement, elemGhostVec);
1028: /* Must transform to unconstrained variables for element integrals */
1029: GridProjectElementVec(grid, mesh, elem, order, PETSC_FALSE, elemGhostVec);
1031: /* Setup ALE variables */
1032: if (grid->ALEActive == PETSC_TRUE) {
1033: GridCalcLocalElementVecIndices(ALEGrid, elem, MeshALEVec);
1034: GridLocalToElement(ALEGrid, MeshALEVec);
1035: }
1037: /* Calculate the contribution to the element matrix from each field */
1038: for(op = 0; op < numMatOps; op++) {
1039: sField = matOps[op].field;
1040: tField = grid->fields[sField].disc->operators[matOps[op].op]->test->field;
1041: if (grid->fields[sField].isActive) {
1042: if (matOps[op].isALE) {
1043: GridInterpolateElementVec(ALEGrid, 0, MeshALEVec, grid, sField, ALEVec);
1044: DiscretizationEvaluateALEOperatorGalerkin(grid->fields[sField].disc, mesh, elemSize, elemStart[tField], elemStart[sField],
1045: matOps[op].op, matOps[op].alpha, elem, &ghostArray[elemStart[sField]],
1046: ALEArray, array, ctx);
1047:
1048: } else {
1049: DiscretizationEvaluateOperatorGalerkin(grid->fields[sField].disc, mesh, elemSize, elemStart[tField], elemStart[sField],
1050: matOps[op].op, matOps[op].alpha, elem, &ghostArray[elemStart[sField]],
1051: array, ctx);
1052:
1053: }
1054: #ifdef PETSC_USE_BOPT_g
1055: #endif
1056: }
1057: }
1059: /* Setup global numbering, with reduction if necessary */
1060: GridCalcGeneralElementMatIndices(grid, elem, order, order, PETSC_FALSE, mat);
1061: #ifdef PETSC_USE_BOPT_g
1062: PetscOptionsHasName(PETSC_NULL, "-trace_mat_assembly", &opt);
1063: if (opt == PETSC_TRUE) {
1064: ElementMatView(mat, PETSC_VIEWER_STDOUT_(mat->comm));
1065: }
1066: #endif
1067:     /* Put diagonal values in the global vector */
1068: ElementMatSetDiagonalValues(mat, d, ADD_VALUES);
1069: }
1071: /* Evaluate self-interaction of new fields created by constraints */
1072: if (expConst == PETSC_TRUE) {
1073:     /* WARNING: This only accommodates 1 constrained field */
1074: /* Get constraint information */
1075: for(f = 0; f < numFields; f++) {
1076: sField = fields[f];
1077: if (grid->fields[sField].isConstrained == PETSC_TRUE) {
1078: newComp = grid->fields[sField].numComp + grid->fields[sField].constraintCompDiff;
1079: break;
1080: }
1081: }
1082: /* Calculate self-interaction */
1083: for(newField = 0; newField < numNewFields; newField++) {
1084: /* Initialize element matrix */
1085: ElementMatZero(mat);
1086: mat->reduceRowSize = newComp;
1087: mat->reduceColSize = newComp;
1089: /* Calculate the indices and contribution to the element matrix from the new field */
1090: (*constCtx->ops->newelemmat)(constCtx, constOrder, newField, mat);
1091: #ifdef PETSC_USE_BOPT_g
1092: PetscOptionsHasName(PETSC_NULL, "-trace_mat_assembly_constrained", &opt);
1093: if (opt == PETSC_TRUE) {
1094: ElementMatView(mat, PETSC_VIEWER_STDOUT_(mat->comm));
1095: }
1096: #endif
1098:       /* Put values in the global vector */
1098: ElementMatSetDiagonalValues(mat, d, ADD_VALUES);
1099: #ifdef PETSC_USE_BOPT_g
1100: #endif
1101: }
1102: }
1104:   /* Assemble vector */
1105: VecAssemblyBegin(d);
1106: VecAssemblyEnd(d);
1108: /* Reset element matrix and vector */
1109: mat->reduceRowSize = locOrder->elemSize;
1110: mat->reduceColSize = locOrder->elemSize;
1111: elemGhostVec->reduceSize = locOrder->elemSize;
1113: /* Cleanup */
1114: if (grid->ALEActive == PETSC_TRUE) {
1115: ElementVecDestroy(ALEVec);
1116: }
1118: return(0);
1119: }