Actual source code: gridBC.c
1: #ifdef PETSC_RCS_HEADER
2: static char vcid[] = "$Id: gridBC.c,v 1.3 2000/01/30 18:27:13 huangp Exp $";
3: #endif
5: #include "src/grid/gridimpl.h" /*I "grid.h" I*//*I "gvec.h" I*/
6: #include "src/vec/impls/mpi/pvecimpl.h" /* For GridCalcBCValues(), soon will not be needed */
8: /*------------------------------------------------ Standard Functions -----------------------------------------------*/
9: /*@
10: GridDuplicateBC - Duplicates the boundary conditions of one grid in another.
12: Collective on Grid
14: Input Parameter:
15: . grid - The grid
17: Output Parameter:
18: . newGrid - The altered grid
20: Level: intermediate
22: .keywords: grid, duplicate, BC
23: .seealso: GridDuplicate()
24: @*/
25: int GridDuplicateBC(Grid grid, Grid newGrid)
26: {
27: int bc;
33: for(bc = 0; bc < grid->numBC; bc++) {
34: GridAddBC(newGrid, grid->bc[bc].boundary, grid->bc[bc].field, grid->bc[bc].func, grid->bc[bc].reduce);
35: }
36: for(bc = 0; bc < grid->numPointBC; bc++) {
37: GridAddPointBC(newGrid, grid->pointBC[bc].point[0], grid->pointBC[bc].point[1], grid->pointBC[bc].point[2], grid->pointBC[bc].field,
38: grid->pointBC[bc].func, grid->pointBC[bc].reduce);
39: }
40: return(0);
41: }
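/*
   Usage sketch (editorial addition, not part of the original source): carrying
   boundary conditions over to a duplicated grid. GridDuplicate() is taken from
   the .seealso reference above; error handling follows the usual PETSc pattern.

     Grid newGrid;
     ierr = GridDuplicate(grid, &newGrid);CHKERRQ(ierr);
     ierr = GridDuplicateBC(grid, newGrid);CHKERRQ(ierr);
*/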
43: /*@
44: GridFinalizeBC - Destroys all structures associated with explicit system
45: reduction using boundary conditions. This should be called after all
46: calculation is finished, prior to GridDestroy().
48: Collective on Grid
50: Input Parameter:
51: . grid - The grid
53: Level: beginner
55: .keywords: grid, boundary conditions
56: .seealso: GridSetBC(), GridAddBC()
57: @*/
58: int GridFinalizeBC(Grid grid)
59: {
64: if (grid->bdReduceVec) {
65: GVecDestroy(grid->bdReduceVec);
66: }
67: if (grid->bdReduceVecOld) {
68: GVecDestroy(grid->bdReduceVecOld);
69: }
70: if (grid->bdReduceVecDiff) {
71: GVecDestroy(grid->bdReduceVecDiff);
72: }
73: if (grid->bdReduceMat) {
74: GMatDestroy(grid->bdReduceMat);
75: }
76: if (grid->reduceVec) {
77: GVecDestroy(grid->reduceVec);
78: }
80: return(0);
81: }
83: /*----------------------------------------------- Database Functions ------------------------------------------------*/
84: /*@C GridSetBC
85: This function sets the boundary condition to use for the problem.
87: Collective on Grid
89: Input Parameters:
90: + grid - The grid
91: . bd - The marker for the boundary along which conditions are applied
92: . field - The field to which the boundary condition is applied
93: . f - The function which defines the boundary condition
94: - reduce - The flag for explicit reduction of the system
96: Level: intermediate
98: .keywords grid, boundary conditions
99: .seealso GridAddBC, GridAddMatOperator, GridAddRhsOperator, GridSetRhsFunction
100: @*/
101: int GridSetBC(Grid grid, int bd, int field, PointFunction f, PetscTruth reduce)
102: {
107: GridValidField(grid, field);
108: grid->numBC = 0;
109: GridAddBC(grid, bd, field, f, reduce);
110: return(0);
111: }
113: /*@C GridAddBC
114: This function adds a boundary condition to use for the problem.
116: Collective on Grid
118: Input Parameters:
119: + grid - The grid
120: . bd - The marker for the boundary along which conditions are applied
121: . field - The field to which the boundary condition is applied
122: . f - The function which defines the boundary condition
123: - reduce - The flag for explicit reduction of the system
125: Level: intermediate
128: .keywords grid, boundary conditions
128: .seealso GridSetBC, GridAddMatOperator, GridAddRhsOperator, GridSetRhsFunction
129: @*/
130: int GridAddBC(Grid grid, int bd, int field, PointFunction f, PetscTruth reduce)
131: {
132: GridBC *tempBC;
133: int bdIndex;
134: int ierr;
138: GridValidField(grid, field);
139: while (grid->numBC >= grid->maxBC) {
140: PetscMalloc(grid->maxBC*2 * sizeof(GridBC), &tempBC);
141: PetscLogObjectMemory(grid, grid->maxBC * sizeof(GridBC));
142: PetscMemcpy(tempBC, grid->bc, grid->maxBC * sizeof(GridBC));
143: PetscFree(grid->bc);
144: grid->bc = tempBC;
145: grid->maxBC *= 2;
146: }
147: /* Make sure boundary is legal */
148: MeshGetBoundaryIndex(grid->mesh, bd, &bdIndex);
149: grid->bc[grid->numBC].boundary = bd;
150: grid->bc[grid->numBC].field = field;
151: grid->bc[grid->numBC].func = f;
152: grid->bc[grid->numBC].reduce = reduce;
153: grid->bc[grid->numBC].node = -1;
154: grid->numBC++;
155: /* Check whether to reduce system */
156: if (reduce == PETSC_TRUE) grid->reduceSystem = PETSC_TRUE;
157: return(0);
158: }
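/*
   Usage sketch (editorial addition): a PointFunction with the calling sequence
   this file uses when it evaluates boundary functions, installed on two
   illustrative boundary markers (1 and 2) for field 0.

     int zeroBC(int n, int comp, double *x, double *y, double *z, PetscScalar *values, void *ctx)
     {
       int i;
       for(i = 0; i < n*comp; i++) values[i] = 0.0;
       return(0);
     }

     ierr = GridSetBC(grid, 1, 0, zeroBC, PETSC_TRUE);CHKERRQ(ierr);   sets the only condition
     ierr = GridAddBC(grid, 2, 0, zeroBC, PETSC_TRUE);CHKERRQ(ierr);   appends a second one
*/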
160: /*@C GridSetPointBC
161: This function sets the boundary condition to use for the problem at a point.
163: Collective on Grid
165: Input Parameters:
166: + grid - The grid
167: . x,y,z - The point at which conditions are applied
168: . field - The field to which the boundary condition is applied
169: . f - The function which defines the boundary condition
170: - reduce - The flag for explicit reduction of the system
172: Level: intermediate
174: .keywords grid, boundary conditions, point
175: .seealso GridAddBC, GridAddMatOperator, GridAddRhsOperator, GridSetRhsFunction
176: @*/
177: int GridSetPointBC(Grid grid, double x, double y, double z, int field, PointFunction f, PetscTruth reduce)
178: {
183: grid->numPointBC = 0;
184: GridAddPointBC(grid, x, y, z, field, f, reduce);
185: return(0);
186: }
188: /*@C GridAddPointBC
189: This function adds a boundary condition to use for the problem at a point.
191: Collective on Grid
193: Input Parameters:
194: + grid - The grid
195: . x,y,z - The point at which conditions are applied
196: . field - The field to which the boundary condition is applied
197: . f - The function which defines the boundary condition
198: - reduce - The flag for explicit reduction of the system
200: Level: intermediate
202: .keywords grid, boundary conditions, point
203: .seealso GridSetBC, GridAddMatOperator, GridAddRhsOperator, GridSetRhsFunction
204: @*/
205: int GridAddPointBC(Grid grid, double x, double y, double z, int field, PointFunction f, PetscTruth reduce)
206: {
207: GridBC *tempBC;
208: int ierr;
212: GridValidField(grid, field);
213: while (grid->numPointBC >= grid->maxPointBC) {
214: PetscMalloc(grid->maxPointBC*2 * sizeof(GridBC), &tempBC);
215: PetscLogObjectMemory(grid, grid->maxPointBC * sizeof(GridBC));
216: PetscMemcpy(tempBC, grid->pointBC, grid->maxPointBC * sizeof(GridBC));
217: PetscFree(grid->pointBC);
218: grid->pointBC = tempBC;
219: grid->maxPointBC *= 2;
220: }
221: if (GridGetNearestBdNode(grid, field, x, y, z, &grid->pointBC[grid->numPointBC].node)) {
222: SETERRQ3(PETSC_ERR_ARG_WRONG, "Invalid point {%g,%g,%g} specified for boundary condition", x, y, z);
223: }
224: grid->pointBC[grid->numPointBC].point[0] = x;
225: grid->pointBC[grid->numPointBC].point[1] = y;
226: grid->pointBC[grid->numPointBC].point[2] = z;
227: grid->pointBC[grid->numPointBC].field = field;
228: grid->pointBC[grid->numPointBC].func = f;
229: grid->pointBC[grid->numPointBC].reduce = reduce;
230: grid->pointBC[grid->numPointBC].boundary = -1;
231: grid->numPointBC++;
232: /* Check whether to reduce system */
233: if (reduce == PETSC_TRUE) grid->reduceSystem = PETSC_TRUE;
234: return(0);
235: }
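/*
   Usage sketch (editorial addition): pinning a single field value at a point,
   e.g. fixing the pressure at the origin to remove a constant null space. The
   coordinates and field number are illustrative; the nearest boundary node is
   located by GridGetNearestBdNode() as in the code above, and zeroBC is the
   sample function sketched after GridAddBC().

     ierr = GridSetPointBC(grid, 0.0, 0.0, 0.0, 1, zeroBC, PETSC_TRUE);CHKERRQ(ierr);
*/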
237: /*@
238: GridSetBCMultiplier - This sets the scalar multiplier used for reduction components on the rhs.
240: Collective on Grid
242: Input Parameters:
243: + grid - The grid
244: - alpha - The scalar multiplier
246: Note:
247: For example, this should be -1 in a nonlinear iteration. The default is 1.
249: Level: developer
251: .keywords: grid, reduction, boundary conditions
252: .seealso: GridGetBCMultiplier(), GridSetBC(), GridAddBC()
253: @*/
254: int GridSetBCMultiplier(Grid grid, PetscScalar alpha)
255: {
258: grid->reduceAlpha = alpha;
259: return(0);
260: }
262: /*@
263: GridGetBCMultiplier - This gets the scalar multiplier used for reduction components on the rhs.
265: Not collective
267: Input Parameter:
268: . grid - The grid
270: Output Parameter:
271: . alpha - The scalar multiplier
273: Level: developer
275: .keywords: grid, reduction, boundary conditions
276: .seealso: GridSetBCMultiplier(), GridSetBC(), GridAddBC()
277: @*/
278: int GridGetBCMultiplier(Grid grid, PetscScalar *alpha)
279: {
283: *alpha = grid->reduceAlpha;
284: return(0);
285: }
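/*
   Usage sketch (editorial addition): per the note in GridSetBCMultiplier(), a
   nonlinear iteration wants the reduction components subtracted from the rhs,
   so the multiplier is flipped to -1 around residual assembly and restored.

     PetscScalar alpha;
     ierr = GridGetBCMultiplier(grid, &alpha);CHKERRQ(ierr);
     ierr = GridSetBCMultiplier(grid, -1.0);CHKERRQ(ierr);
     ... assemble the nonlinear residual here ...
     ierr = GridSetBCMultiplier(grid, alpha);CHKERRQ(ierr);
*/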
287: /*@
288: GridSetBCContext - This sets the optional user context passed to all
289: routines which assemble boundary reduction information. Must be called
290: before GridSetUp().
292: Collective on Grid
294: Input Parameters:
295: + grid - The grid
296: - ctx - The context
298: Level: intermediate
300: .keywords: grid, reduction, boundary conditions
301: .seealso: GridGetBCContext(), GridSetBC(), GridAddBC()
302: @*/
303: int GridSetBCContext(Grid grid, void *ctx)
304: {
307: grid->reduceContext = ctx;
308: return(0);
309: }
311: /*@
312: GridGetBCContext - This gets the optional user context passed to all
313: routines which assemble boundary reduction information.
315: Not collective
317: Input Parameter:
318: . grid - The grid
320: Output parameter:
321: . ctx - The context
323: Level: intermediate
325: .keywords: grid, reduction, boundary conditions
326: .seealso: GridSetBCContext(), GridSetBC(), GridAddBC()
327: @*/
328: int GridGetBCContext(Grid grid, void **ctx)
329: {
333: *ctx = grid->reduceContext;
334: return(0);
335: }
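/*
   Usage sketch (editorial addition): handing a user context to the boundary
   reduction routines before GridSetUp(), and retrieving it later. MyCtx is a
   hypothetical application structure.

     MyCtx user;
     MyCtx *check;
     ierr = GridSetBCContext(grid, &user);CHKERRQ(ierr);
     ierr = GridSetUp(grid);CHKERRQ(ierr);
     ierr = GridGetBCContext(grid, (void **) &check);CHKERRQ(ierr);
*/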
337: /*@
338: GridSetBCValuesType - This determines which boundary values are used to reduce
339: the system. It is intended to allow time dependent boundary conditions to be
340: used, and also supports the difference of two sets of values.
342: Collective on Grid
344: Input Parameters:
345: + grid - The grid
346: - type - The values to use: BC_VALUES, BC_VALUES_OLD, or BC_VALUES_DIFF
347: Level: intermediate
349: .keywords: grid, reduction, boundary conditions
350: .seealso: GridSetBC(), GridAddBC()
351: @*/
352: int GridSetBCValuesType(Grid grid, BCValuesType type)
353: {
356: if (grid->reduceSystem == PETSC_FALSE)
357: return(0);
359: switch(type) {
360: case BC_VALUES:
361: grid->bdReduceVecCur = grid->bdReduceVec;
362: break;
363: case BC_VALUES_OLD:
364: if (grid->bdReduceVecOld == PETSC_NULL) {
365: SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Old boundary values not stored");
366: }
367: grid->bdReduceVecCur = grid->bdReduceVecOld;
368: break;
369: case BC_VALUES_DIFF:
370: if (grid->bdReduceVecDiff == PETSC_NULL) {
371: SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Difference of boundary values not stored");
372: }
373: grid->bdReduceVecCur = grid->bdReduceVecDiff;
374: break;
375: default:
376: SETERRQ1(PETSC_ERR_ARG_WRONG, "Invalid type %d for boundary value calculation", type);
377: }
378: return(0);
379: }
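/*
   Usage sketch (editorial addition): selecting which stored boundary values
   drive the reduction. BC_VALUES_OLD and BC_VALUES_DIFF are only legal once a
   previous set has been saved or differenced (see the calculation functions
   below).

     ierr = GridSetBCValuesType(grid, BC_VALUES_DIFF);CHKERRQ(ierr);
     ... assemble using the difference of boundary values ...
     ierr = GridSetBCValuesType(grid, BC_VALUES);CHKERRQ(ierr);
*/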
381: /*---------------------------------------------- Calculation Functions ----------------------------------------------*/
382: /*@C GridCalcPointBCNodes
383: This function recalculates the nodes used for point boundary conditions.
385: Collective on Grid
387: Input Parameter:
388: . grid - The grid
390: Notes:
391: This function is called by GridReform() after the mesh is recalculated.
393: Level: advanced
395: .keywords grid, point BC, node
396: .seealso GridSetBC, GridAddMatOperator, GridAddRhsOperator, GridSetRhsFunction
397: @*/
398: int GridCalcPointBCNodes(Grid grid)
399: {
400: double x, y, z;
401: int bc;
404: for(bc = 0; bc < grid->numPointBC; bc++) {
405: x = grid->pointBC[bc].point[0];
406: y = grid->pointBC[bc].point[1];
407: z = grid->pointBC[bc].point[2];
408: if (GridGetNearestBdNode(grid, grid->pointBC[bc].field, x, y, z, &grid->pointBC[bc].node)) {
409: SETERRQ3(PETSC_ERR_ARG_WRONG, "Invalid point {%g,%g,%g} specified for boundary condition", x, y, z);
410: }
411: }
412: return(0);
413: }
415: int GridSaveBCValues_Private(Grid grid, VarOrdering reduceOrder, Vec reduceVec) {
416: PetscScalar *array, *arrayOld;
417: int ierr;
420: /* Create storage for reduction of Rhs */
421: if (grid->bdReduceVecOld == PETSC_NULL) {
422: GVecDuplicate(reduceVec, &grid->bdReduceVecOld);
423: } else if (((Vec_MPI *) grid->bdReduceVecOld->data)->nghost != ((Vec_MPI *) grid->bdReduceVec->data)->nghost) {
424: GVecDestroy(grid->bdReduceVecOld);
425: GVecDuplicate(reduceVec, &grid->bdReduceVecOld);
426: }
427: /* VecCopy(grid->bdReduceVec, grid->bdReduceVecOld); */
428: VecGetArray(reduceVec, &array);
429: VecGetArray(grid->bdReduceVecOld, &arrayOld);
430: PetscMemcpy(arrayOld, array, reduceOrder->numOverlapVars * sizeof(PetscScalar));
431: VecRestoreArray(reduceVec, &array);
432: VecRestoreArray(grid->bdReduceVecOld, &arrayOld);
433: return(0);
434: }
436: int GridCalcGridBCValues_Private(Grid grid, VarOrdering reduceOrder, Vec reduceVec, void *ctx) {
437: GridBC *gBC = grid->bc;
438: VarOrdering bcOrder;
439: int bc;
440: int ierr;
443: /* Evaluate the vector of boundary values --
444: If order->localStart[field] is NULL, this means the field is not present in the ordering. This is
445: a better check than seeing if the field is active, since we might want to pass in an order on that
446: field to make boundary values for an inactive field.
447: */
448: for(bc = 0; bc < grid->numBC; bc++) {
449: if (gBC[bc].reduce != PETSC_TRUE) continue;
450: if (reduceOrder->localStart[gBC[bc].field] == PETSC_NULL) continue;
451: VarOrderingCreateSubset(reduceOrder, 1, &gBC[bc].field, PETSC_FALSE, &bcOrder);
452: (*grid->ops->gvecevaluatefunctionboundary)(grid, reduceVec, gBC[bc].boundary, bcOrder, gBC[bc].func, 1.0, ctx);
453: VarOrderingDestroy(bcOrder);
456: }
457: return(0);
458: }
460: int GridCalcPointBCValues_Private(Grid grid, VarOrdering reduceOrder, Vec reduceVec, void *ctx) {
461: GridBC *pBC = grid->pointBC;
462: int **localStart = reduceOrder->localStart;
463: int *offsets = reduceOrder->offsets;
464: int *localOffsets = reduceOrder->localOffsets;
465: FieldClassMap map;
466: VarOrdering bcOrder;
467: PetscScalar *array;
468: double x, y, z;
469: int numNodes, firstVar, rank;
470: int bc, field, numComp, node, nclass, row;
471: int ierr;
474: MPI_Comm_rank(grid->comm, &rank);
475: VarOrderingGetClassMap(reduceOrder, &map);
476: numNodes = map->numNodes;
477: firstVar = reduceOrder->firstVar[rank];
478: /* Evaluate the vector of boundary values --
479: If order->localStart[field] is NULL, this means the field is not present in the ordering. This is
480: a better check than seeing if the field is active, since we might want to pass in an order on that
481: field to make boundary values for an inactive field.
482: */
483: VecGetArray(reduceVec, &array);
484: for(bc = 0; bc < grid->numPointBC; bc++) {
485: if (pBC[bc].reduce != PETSC_TRUE) continue;
486: if (reduceOrder->localStart[pBC[bc].field] == PETSC_NULL) continue;
487: field = pBC[bc].field;
488: numComp = grid->fields[field].numComp;
489: node = pBC[bc].node;
490: nclass = map->classes[node];
491: VarOrderingCreateSubset(reduceOrder, 1, &field, PETSC_FALSE, &bcOrder);
493: if (node >= numNodes) {
494: row = localOffsets[node-numNodes];
495: } else {
496: row = offsets[node] - firstVar + localStart[field][nclass];
497: }
498: x = pBC[bc].point[0];
499: y = pBC[bc].point[1];
500: z = pBC[bc].point[2];
501: (*pBC[bc].func)(1, numComp, &x, &y, &z, &array[row], ctx);
503: VarOrderingDestroy(bcOrder);
506: }
507: VecRestoreArray(reduceVec, &array);
508: return(0);
509: }
511: int GridCalcBCValues_Private(Grid grid, VarOrdering reduceOrder, Vec reduceVec, PetscTruth save, void *ctx) {
512: PetscScalar *array;
513: int numGhostVars;
514: int ierr;
517: numGhostVars = reduceOrder->numOverlapVars - reduceOrder->numLocVars;
518: if (((Vec_MPI *) reduceVec->data)->nghost != numGhostVars) {
519: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid reduce vector size %d should be %d", ((Vec_MPI *) reduceVec->data)->nghost, numGhostVars);
520: }
522: if (save == PETSC_TRUE) {
523: GridSaveBCValues_Private(grid, reduceOrder, reduceVec);
524: }
526: /* Initialize vector */
527: /* VecSet(&zero, reduceVec); */
528: VecGetArray(reduceVec, &array);
529: PetscMemzero(array, reduceOrder->numOverlapVars * sizeof(PetscScalar));
530: VecRestoreArray(reduceVec, &array);
532: GridCalcGridBCValues_Private(grid, reduceOrder, reduceVec, ctx);
533: GridCalcPointBCValues_Private(grid, reduceOrder, reduceVec, ctx);
535: return(0);
536: }
538: /*@
539: GridCalcBCValues - This function calculates the boundary values. It
540: is normally called once per timestep when using time dependent boundary
541: conditions.
543: Collective on Grid
545: Input Parameters:
546: + grid - The grid
547: . save - A flag used to store old values, usually for timestepping
548: - ctx - The context
550: Level: advanced
552: .keywords: grid, reduction, boundary conditions
553: .seealso: GridSetBCContext(), GridSetBC(), GridAddBC()
554: @*/
555: int GridCalcBCValues(Grid grid, PetscTruth save, void *ctx)
556: {
562: if (grid->reduceSystem == PETSC_TRUE) {
564: GridCalcBCValues_Private(grid, grid->reduceOrder, grid->bdReduceVec, save, ctx);
565: }
567: return(0);
568: }
570: /*@
571: GridCalcBCValuesDifference - This function calculates the difference of the
572: last two sets of boundary values and puts it in an internal vector. This is
573: normally used to implement the Rhs time derivative in a GTS.
575: Collective on Grid
577: Input Parameter:
578: . grid - The grid
580: Level: advanced
582: .keywords: grid, reduction, boundary conditions
583: .seealso: GridSetBCContext(), GridSetBC(), GridAddBC()
584: @*/
585: int GridCalcBCValuesDifference(Grid grid)
586: {
587: PetscScalar *array, *arrayOld, *arrayDiff;
588: int numGhostVars;
589: register int i, n;
590: int ierr;
594: if (grid->reduceSystem == PETSC_TRUE) {
595: numGhostVars = grid->reduceOrder->numOverlapVars - grid->reduceOrder->numLocVars;
596: if (((Vec_MPI *) grid->bdReduceVec->data)->nghost != numGhostVars) {
597: SETERRQ(PETSC_ERR_ARG_WRONG, "Invalid reduce vector size");
598: }
599: if (grid->bdReduceVecOld == PETSC_NULL) {
600: SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "No previous boundary values");
601: }
602: /* Create storage for reduction of Rhs */
603: if (grid->bdReduceVecDiff == PETSC_NULL) {
604: GVecDuplicate(grid->bdReduceVec, &grid->bdReduceVecDiff);
605: } else if (((Vec_MPI *) grid->bdReduceVecDiff->data)->nghost != ((Vec_MPI *) grid->bdReduceVec->data)->nghost) {
606: GVecDestroy(grid->bdReduceVecDiff);
607: GVecDuplicate(grid->bdReduceVec, &grid->bdReduceVecDiff);
608: }
609: /* VecWAXPY(&minusOne, grid->bdReduceVecOld, grid->bdReduceVec, grid->bdReduceVecDiff); */
610: VecGetArray(grid->bdReduceVec, &array);
611: VecGetArray(grid->bdReduceVecOld, &arrayOld);
612: VecGetArray(grid->bdReduceVecDiff, &arrayDiff);
613: n = grid->reduceOrder->numOverlapVars;
614: PetscLogFlops(n);
615: for(i = 0; i < n; i++)
616: arrayDiff[i] = array[i] - arrayOld[i];
617: VecRestoreArray(grid->bdReduceVec, &array);
618: VecRestoreArray(grid->bdReduceVecOld, &arrayOld);
619: VecRestoreArray(grid->bdReduceVecDiff, &arrayDiff);
620: }
624: return(0);
625: }
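/*
   Usage sketch (editorial addition): the once-per-timestep sequence for time
   dependent boundary conditions, combining the two functions above.

     ierr = GridCalcBCValues(grid, PETSC_TRUE, userCtx);CHKERRQ(ierr);   save the old values
     ierr = GridCalcBCValuesDifference(grid);CHKERRQ(ierr);              form new minus old
     ierr = GridSetBCValuesType(grid, BC_VALUES_DIFF);CHKERRQ(ierr);     reduce with the difference
*/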
627: /*----------------------------------------------- Reduction Functions -----------------------------------------------*/
628: /*@C
629: GridSetReduceSystem - This function determines whether unknowns associated
630: with boundary conditions are eliminated from the system.
632: Collective on Grid
634: Input Parameters:
635: + grid - The grid
636: - reduce - The flag for explicit reduction of the system
638: Level: intermediate
640: .keywords grid, boundary condition, reduce
641: .seealso GridGetReduceSystem(), GridSetBC(), GridAddBC(), GridSetPointBC(), GridAddPointBC()
642: @*/
643: int GridSetReduceSystem(Grid grid, PetscTruth reduce)
644: {
647: grid->reduceSystem = reduce;
648: return(0);
649: }
651: /*@C
652: GridGetReduceSystem - This function reveals whether unknowns associated
653: with boundary conditions are eliminated from the system.
655: Not collective
657: Input Parameter:
658: . grid - The grid
660: Output Parameter:
661: . reduce - The flag for explicit reduction of the system
663: Level: intermediate
665: .keywords grid, boundary condition, reduce
666: .seealso GridSetReduceSystem(), GridSetBC(), GridAddBC(), GridSetPointBC(), GridAddPointBC()
667: @*/
668: int GridGetReduceSystem(Grid grid, PetscTruth *reduce)
669: {
673: *reduce = grid->reduceSystem;
674: return(0);
675: }
677: /*@C
678: GridSetReduceElement - This function determines whether element matrices and vectors
679: are reduced on the fly, or if boundary operators are stored and applied.
681: Collective on Grid
683: Input Parameters:
684: + grid - The grid
685: - reduce - The flag for explicit reduction of the system
687: Level: intermediate
689: .keywords grid, boundary condition, reduce, element
690: .seealso GridGetReduceElement(), GridSetBC(), GridAddBC(), GridSetPointBC(), GridAddPointBC()
691: @*/
692: int GridSetReduceElement(Grid grid, PetscTruth reduce)
693: {
696: grid->reduceElement = reduce;
697: return(0);
698: }
700: /*@C
701: GridGetReduceElement - This function indicates whether element matrices and vectors
702: are reduced on the fly, or if boundary operators are stored and applied.
704: Not collective
706: Input Parameter:
707: . grid - The grid
709: Output Parameter:
710: . reduce - The flag for explicit reduction of the system
712: Level: intermediate
714: .keywords grid, boundary condition, reduce, element
715: .seealso GridSetReduceElement(), GridSetBC(), GridAddBC(), GridSetPointBC(), GridAddPointBC()
716: @*/
717: int GridGetReduceElement(Grid grid, PetscTruth *reduce)
718: {
722: *reduce = grid->reduceElement;
723: return(0);
724: }
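/*
   Usage sketch (editorial addition): the two reduction switches together.
   GridSetReduceSystem() controls whether boundary unknowns are eliminated at
   all; GridSetReduceElement() chooses between on-the-fly element reduction
   and stored boundary operators.

     ierr = GridSetReduceSystem(grid, PETSC_TRUE);CHKERRQ(ierr);
     ierr = GridSetReduceElement(grid, PETSC_TRUE);CHKERRQ(ierr);
*/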
726: /*---------------------------------------------- Application Functions ----------------------------------------------*/
727: /*
728: GridResetConstrainedMultiply_Private - This function resets the multiplication routine for constrained matrices
730: Input Parameters:
731: + grid - The Grid
732: - A - The GMat
734: Level: developer
736: .keywords Grid, GMat, reset, constrained, multiply
737: .seealso GridEvaluateRhs
738: */
739: int GridResetConstrainedMultiply_Private(Grid grid, GMat A) {
740: PetscTruth isConstrained, explicitConstraints;
741: void (*oldMult)(void);
742: int ierr;
745: GridIsConstrained(grid, &isConstrained);
746: GridGetExplicitConstraints(grid, &explicitConstraints);
747: if (isConstrained == PETSC_TRUE) {
748: if (explicitConstraints == PETSC_FALSE) {
749: MatShellGetOperation(A, MATOP_MULT, &oldMult);
750: if (oldMult != (void (*)(void)) GMatMatMultConstrained) {
751: MatShellSetOperation(A, MATOP_MULT_CONSTRAINED, oldMult);
752: }
753: MatShellSetOperation(A, MATOP_MULT, (void (*)(void)) GMatMatMultConstrained);
755: MatShellGetOperation(A, MATOP_MULT_TRANSPOSE, &oldMult);
756: if (oldMult != (void (*)(void)) GMatMatMultTransposeConstrained) {
757: MatShellSetOperation(A, MATOP_MULT_TRANSPOSE_CONSTRAINED, oldMult);
758: }
759: MatShellSetOperation(A, MATOP_MULT_TRANSPOSE, (void (*)(void)) GMatMatMultTransposeConstrained);
760: } else {
761: MatShellGetOperation(A, MATOP_MULT_CONSTRAINED, &oldMult);
762: if (oldMult != PETSC_NULL) {
763: MatShellSetOperation(A, MATOP_MULT, oldMult);
764: }
766: MatShellGetOperation(A, MATOP_MULT_TRANSPOSE_CONSTRAINED, &oldMult);
767: if (oldMult != PETSC_NULL) {
768: MatShellSetOperation(A, MATOP_MULT_TRANSPOSE, oldMult);
769: }
770: }
771: }
772: return(0);
773: }
775: /*@C GridSetBoundary
776: This function sets Dirichlet boundary conditions on the linear problem arising
777: from the underlying grid.
779: Collective on Grid
781: Input Parameters:
782: + bd - The marker for the boundary to apply conditions along
783: . field - The field to which the conditions apply
784: . diag - The scalar to be placed on the diagonal
785: . f - The function which defines the boundary condition
786: - ctx - The user-supplied context
788: Output Parameters:
789: + A - The system matrix
790: - b - The Rhs vector
792: Level: intermediate
794: .keywords boundary conditions, finite element
795: .seealso MeshGetBoundaryStart
796: @*/
797: int GridSetBoundary(int bd, int field, PetscScalar diag, PointFunction f, GMat A, GVec b, void *ctx)
798: {
799: Grid grid;
800: int ierr;
804: GMatGetGrid(A, &grid);
805: GridSetBoundaryRectangular(bd, field, diag, f, grid->order, A, b, ctx);
806: return(0);
807: }
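/*
   Usage sketch (editorial addition): imposing a Dirichlet condition on an
   assembled system, which inserts the values of f into b and replaces the
   boundary rows of A by diag on the diagonal. Marker 1 and field 0 are
   illustrative, and zeroBC is the sample function sketched after GridAddBC().

     ierr = GridSetBoundary(1, 0, 1.0, zeroBC, A, b, userCtx);CHKERRQ(ierr);
*/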
809: /*@C GridSetBoundaryRectangular
810: This function sets Dirichlet boundary conditions on the linear problem arising
811: from the underlying grid, and the default variable ordering can be overridden.
813: Collective on Grid
815: Input Parameters:
816: + bd - The marker for the boundary to apply conditions along
817: . field - The field to which the conditions apply
818: . diag - The scalar to be placed on the diagonal
819: . f - The function which defines the boundary condition
820: . order - The test variable ordering
821: - ctx - The user-supplied context
823: Output Parameters:
824: + A - The system matrix
825: - b - The Rhs vector
827: Level: advanced
829: .keywords boundary conditions, finite element
830: .seealso MeshGetBoundaryStart
831: @*/
832: int GridSetBoundaryRectangular(int bd, int field, PetscScalar diag, PointFunction f, VarOrdering order, GMat A, GVec b, void *ctx)
833: {
834: Grid grid, grid2;
835: Mesh mesh;
836: int comp; /* The number of field components */
837: int size; /* The number of nodes in the boundary */
838: int *localStart; /* The offset of this field on a node of a given class */
839: int node; /* The canonical node number of the current boundary node */
840: int nclass; /* The class of the current boundary node */
841: double *x, *y, *z; /* Coordinates of the boundary nodes */
842: int vars; /* The number of variables affected (var/node * size) */
843: int *offsets; /* The canonical variable number for the first variable on each node */
844: int *rows; /* Rows corresponding to boundary nodes */
845: PetscScalar *values; /* Boundary values */
846: PetscScalar elem = diag;
847: IS is;
848: int rank;
849: int i, j, count;
850: #ifdef PETSC_USE_BOPT_g
851: PetscTruth opt;
852: #endif
853: int ierr;
859: GMatGetGrid(A, &grid);
860: GVecGetGrid(b, &grid2);
861: if (grid != grid2) SETERRQ(PETSC_ERR_ARG_INCOMP, "Matrix and vector have different underlying grids");
862: GridValidField(grid, field);
863: MPI_Comm_rank(grid->comm, &rank);
864: mesh = grid->mesh;
865: comp = grid->fields[field].disc->comp;
866: offsets = order->offsets;
867: localStart = order->localStart[field];
869: /* Support for constrained problems */
870: VecGetSize(b, &size);
871: if (grid->isConstrained) {
872: if (size != grid->constraintOrder->numVars) SETERRQ(PETSC_ERR_ARG_WRONG, "Invalid vector size");
873: offsets = grid->constraintOrder->offsets;
874: } else {
875: if (size != grid->order->numVars) SETERRQ(PETSC_ERR_ARG_WRONG, "Invalid vector size");
876: }
878: /* Allocate memory */
879: GridGetBoundarySize(grid, bd, field, &size);
880: if (size == 0) {
881: #ifdef PETSC_USE_BOPT_g
882: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
883: if (opt == PETSC_TRUE) {
884: PetscSynchronizedFlush(grid->comm);
885: PetscSynchronizedFlush(grid->comm);
886: }
887: #endif
888: VecAssemblyBegin(b);
889: VecAssemblyEnd(b);
890: ISCreateStride(PETSC_COMM_SELF, 0, 0, 1, &is);
891: MatZeroRows(A, is, &elem);
892: ISDestroy(is);
893: return(0);
894: }
895: vars = size*comp;
896: PetscMalloc(size * sizeof(double), &x);
897: PetscMalloc(size * sizeof(double), &y);
898: PetscMalloc(size * sizeof(double), &z);
899: PetscMalloc(vars * sizeof(PetscScalar), &values);
900: PetscMalloc(vars * sizeof(int), &rows);
902: /* Loop over boundary nodes */
903: GridGetBoundaryStart(grid, bd, field, PETSC_FALSE, &node, &nclass);
904: for(i = 0, count = 0; node >= 0; i++) {
905: for(j = 0; j < comp; j++, count++) {
906: rows[count] = offsets[node] + j + localStart[nclass];
907: #ifdef PETSC_USE_BOPT_g
908: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
909: if (opt == PETSC_TRUE) {
910: PetscSynchronizedPrintf(grid->comm, "[%d]bd %d field: %d node: %d row: %d class: %d\n",
911: rank, bd, field, node, rows[count], nclass);
912: }
913: #endif
914: }
915: MeshGetNodeCoords(mesh, node, &x[i], &y[i], &z[i]);
916: GridGetBoundaryNext(grid, bd, field, PETSC_FALSE, &node, &nclass);
917: }
918: #ifdef PETSC_USE_BOPT_g
919: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
920: if (opt == PETSC_TRUE) {
921: PetscSynchronizedFlush(grid->comm);
922: }
923: #endif
924: /* Get boundary values */
925: (*f)(size, comp, x, y, z, values, ctx);
926: /* Put values in Rhs */
927: #ifdef PETSC_USE_BOPT_g
928: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
929: if (opt == PETSC_TRUE) {
930: PetscPrintf(grid->comm, "Setting boundary values in rhs bd %d field %d\n", bd, field);
931: for(i = 0; i < vars; i++) PetscSynchronizedPrintf(grid->comm, " row: %d val: %g\n", rows[i], PetscRealPart(values[i]));
932: PetscSynchronizedFlush(grid->comm);
933: }
934: #endif
935: VecSetValues(b, vars, rows, values, INSERT_VALUES);
936: VecAssemblyBegin(b);
937: VecAssemblyEnd(b);
938: /* Set rows of A to the identity */
939: ISCreateGeneral(PETSC_COMM_SELF, vars, rows, &is);
940: MatZeroRows(A, is, &elem);
941: ISDestroy(is);
943: GridResetConstrainedMultiply_Private(grid, A);
945: PetscFree(x);
946: PetscFree(y);
947: PetscFree(z);
948: PetscFree(values);
949: PetscFree(rows);
950: return(0);
951: }
953: /*------------------------------------------------- Matrix Functions ------------------------------------------------*/
954: /*@C GridSetMatBoundary
955: This function sets Dirichlet boundary conditions on the linear system matrix arising
956: from the underlying grid.
958: Collective on GMat
960: Input Parameters:
961: + bd - The marker for the boundary to apply conditions along
962: . field - The field to which the conditions apply
963: . diag - The scalar to be placed on the diagonal
964: - ctx - The user-supplied context
966: Output Parameter:
967: . A - The system matrix
969: Level: advanced
971: .keywords boundary conditions, finite element
972: .seealso MeshGetBoundaryStart
973: @*/
974: int GridSetMatBoundary(int bd, int field, PetscScalar diag, GMat A, void *ctx)
975: {
976: Grid grid;
977: int ierr;
981: GMatGetGrid(A, &grid);
982: GridSetMatBoundaryRectangular(1, &bd, &field, diag, grid->order, A, ctx);
983: return(0);
984: }
986: /*@C GridSetMatBoundaryRectangular
987: This function sets Dirichlet boundary conditions on the linear system matrix arising
988: from the underlying grid, and the default variable ordering can be overridden.
990: Collective on GMat
992: Input Parameters:
993: + num - The number of boundary conditions
994: . bd - The markers for each boundary to apply conditions along
995: . field - The fields to which the conditions apply
996: . diag - The scalar to be placed on the diagonal
997: . order - The test variable ordering
998: - ctx - The user-supplied context
1000: Output Parameter:
1001: . A - The system matrix
1003: Level: advanced
1005: .keywords boundary conditions, finite element
1006: .seealso MeshGetBoundaryStart
1007: @*/
1008: int GridSetMatBoundaryRectangular(int num, int *bd, int *field, PetscScalar diag, VarOrdering order, GMat A, void *ctx)
1009: {
1010: Grid grid;
1011: int comp; /* The number of field components */
1012: int size; /* The number of nodes in the boundary */
1013: int totSize; /* The number of nodes in all boundaries */
1014: int *localStart; /* The offset of this field on a node of a given class */
1015: int node; /* The canonical node number of the current boundary node */
1016: int nclass; /* The class of the current boundary node */
1017: int vars; /* The number of variables affected (var/node * size) */
1018: int *offsets; /* The canonical variable number for the first variable on each node */
1019: int *rows; /* Rows corresponding to boundary nodes */
1020: PetscScalar elem = diag;
1021: IS is;
1022: int rank;
1023: int b, i, j, count;
1024: #ifdef PETSC_USE_BOPT_g
1025: PetscTruth opt;
1026: #endif
1027: int ierr;
1032: GMatGetGrid(A, &grid);
1033: offsets = order->offsets;
1034: MPI_Comm_rank(grid->comm, &rank);
1036: /* Allocate memory */
1037: for(b = 0, totSize = 0, vars = 0; b < num; b++) {
1038: GridValidField(grid, field[b]);
1039: GridGetBoundarySize(grid, bd[b], field[b], &size);
1040: totSize += size;
1041: vars += size*grid->fields[field[b]].disc->comp;
1042: }
1043: if (totSize == 0) {
1044: #ifdef PETSC_USE_BOPT_g
1045: PetscSynchronizedFlush(grid->comm);
1046: #endif
1047: ISCreateStride(PETSC_COMM_SELF, 0, 0, 1, &is);
1048: MatZeroRows(A, is, &elem);
1049: ISDestroy(is);
1050: return(0);
1051: }
1052: PetscMalloc(vars * sizeof(int), &rows);
1054: /* Loop over boundaries */
1055: for(b = 0, count = 0; b < num; b++) {
1056: comp = grid->fields[field[b]].disc->comp;
1057: localStart = order->localStart[field[b]];
1058: /* Loop over boundary nodes */
1059: GridGetBoundaryStart(grid, bd[b], field[b], PETSC_FALSE, &node, &nclass);
1060: for(i = 0; node >= 0; i++) {
1061: for(j = 0; j < comp; j++, count++) {
1062: rows[count] = offsets[node] + j + localStart[nclass];
1063: #ifdef PETSC_USE_BOPT_g
1064: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1065: if (opt == PETSC_TRUE) {
1066: PetscSynchronizedPrintf(grid->comm, "[%d]bd %d field: %d node: %d row: %d class: %d\n",
1067: rank, bd[b], field[b], node, rows[count], nclass);
1068: }
1069: #endif
1070: }
1071: GridGetBoundaryNext(grid, bd[b], field[b], PETSC_FALSE, &node, &nclass);
1072: }
1073: }
1074: #ifdef PETSC_USE_BOPT_g
1075: PetscSynchronizedFlush(grid->comm);
1076: if (count != vars) SETERRQ2(PETSC_ERR_PLIB, "Boundary size %d should be %d", count, vars);
1077: #endif
1078: /* Set rows of A to the identity */
1079: ISCreateGeneral(PETSC_COMM_SELF, vars, rows, &is);
1080: MatZeroRows(A, is, &elem);
1081: ISDestroy(is);
1083: GridResetConstrainedMultiply_Private(grid, A);
1085: PetscFree(rows);
1086: return(0);
1087: }
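/*
   Usage sketch (editorial addition): zeroing the boundary rows of A for
   several (marker, field) pairs in one pass; the default grid ordering is
   what the GridSetMatBoundary() wrapper above passes in.

     int bds[2]    = {1, 2};
     int fields[2] = {0, 0};
     ierr = GridSetMatBoundaryRectangular(2, bds, fields, 1.0, grid->order, A, userCtx);CHKERRQ(ierr);
*/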
1089: /*@C GridSetMatPointBoundary
1090: This function sets Dirichlet boundary conditions on the linear system matrix arising
1091: from the underlying grid.
1093: Collective on GMat
1095: Input Parameters:
1096: + node - The constrained node
1097: . field - The field to which the conditions apply
1098: . diag - The scalar to be placed on the diagonal
1099: - ctx - The user-supplied context
1101: Output Parameter:
1102: . A - The system matrix
1104: Level: advanced
1106: .keywords boundary conditions, finite element
1107: .seealso MeshGetBoundaryStart
1108: @*/
1109: int GridSetMatPointBoundary(int node, int field, PetscScalar diag, GMat A, void *ctx)
1110: {
1111: Grid grid;
1112: int ierr;
1116: GMatGetGrid(A, &grid);
1117: GridSetMatPointBoundaryRectangular(node, field, diag, grid->order, A, ctx);
1118: return(0);
1119: }
1121: /*@C GridSetMatPointBoundaryRectangular
1122: This function sets Dirichlet boundary conditions on the linear system matrix arising
1123: from the underlying grid, and the default variable ordering can be overridden.
1125: Collective on GMat
1127: Input Parameters:
1128: + node - The constrained node
1129: . field - The field to which the conditions apply
1130: . diag - The scalar to be placed on the diagonal
1131: . order - The test variable ordering
1132: - ctx - The user-supplied context
1134: Output Parameter:
1135: . A - The system matrix
1137: Level: advanced
1139: .keywords boundary conditions, finite element
1140: .seealso MeshGetBoundaryStart
1141: @*/
1142: int GridSetMatPointBoundaryRectangular(int node, int field, PetscScalar diag, VarOrdering order, GMat A, void *ctx)
1143: {
1144: Grid grid;
1145: int comp; /* The number of field components */
1146: int *localStart; /* The offset of this field on a node of a given class */
1147: int nclass; /* The class of the current boundary node */
1148: int *offsets; /* The canonical variable number for the first variable on each node */
1149: int *rows; /* Rows corresponding to boundary nodes */
1150: PetscScalar elem = diag;
1151: IS is;
1152: int rank;
1153: int j;
1154: #ifdef PETSC_USE_BOPT_g
1155: PetscTruth opt;
1156: #endif
1157: int ierr;
1160: if (node < 0) {
1161: ISCreateStride(PETSC_COMM_SELF, 0, 0, 1, &is);
1162: MatZeroRows(A, is, &elem);
1163: ISDestroy(is);
1164: return(0);
1165: }
1168: GMatGetGrid(A, &grid);
1169: GridValidField(grid, field);
1170: MPI_Comm_rank(grid->comm, &rank);
1171: comp = grid->fields[field].disc->comp;
1172: offsets = order->offsets;
1173: localStart = order->localStart[field];
1175: /* Allocate memory */
1176: PetscMalloc(comp * sizeof(int), &rows);
1178: GridGetNodeClass(grid, node, &nclass);
1179: for(j = 0; j < comp; j++) {
1180: rows[j] = offsets[node] + j + localStart[nclass];
1181: #ifdef PETSC_USE_BOPT_g
1182: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1183: if (opt == PETSC_TRUE) {
1184: PetscPrintf(PETSC_COMM_SELF, "[%d]field: %d node: %d row: %d class: %d\n", rank, field, node, rows[j], nclass);
1185: }
1186: #endif
1187: }
1190: /* Set rows of A to the identity */
1191: ISCreateGeneral(PETSC_COMM_SELF, comp, rows, &is);
1192: MatZeroRows(A, is, &elem);
1193: ISDestroy(is);
1195: GridResetConstrainedMultiply_Private(grid, A);
1197: PetscFree(rows);
1198: return(0);
1199: }
1201: /*------------------------------------------------- Vector Functions ------------------------------------------------*/
1202: /*@C GridSetVecBoundary
1203: This function sets Dirichlet boundary conditions on the linear Rhs arising
1204: from the underlying grid.
1206: Collective on GVec
1208: Input Parameters:
1209: + bd - The marker for the boundary to apply conditions along
1210: . field - The field to which the conditions apply
1211: . f - The function which defines the boundary condition
1212: - ctx - The user-supplied context
1214: Output Parameter:
1215: . b - The Rhs vector
1217: Level: advanced
1219: .keywords boundary conditions, finite element
1220: .seealso MeshGetBoundaryStart
1221: @*/
1222: int GridSetVecBoundary(int bd, int field, PointFunction f, GVec b, void *ctx)
1223: {
1224: Grid grid;
1225: int ierr;
1229: GVecGetGrid(b, &grid);
1230: GridSetVecBoundaryRectangular(1, &bd, &field, &f, grid->order, b, ctx);
1231: return(0);
1232: }
1234: /*@C GridSetVecBoundaryRectangular
1235: This function sets Dirichlet boundary conditions on the linear Rhs arising
1236: from the underlying grid, and the default variable ordering can be overridden.
1238: Collective on GVec
1240: Input Parameters:
1241: + num - The number of boundary conditions
1242: . bd - The markers for each boundary to apply conditions along
1243: . field - The fields to which the conditions apply
1244: . f - The functions which define the boundary conditions
1245: . order - The test variable ordering
1246: - ctx - The user-supplied context
1248: Output Parameter:
1249: . b - The Rhs vector
1251: Level: advanced
1253: .keywords boundary conditions, finite element
1254: .seealso MeshGetBoundaryStart
1255: @*/
1256: int GridSetVecBoundaryRectangular(int num, int *bd, int *field, PointFunction *f, VarOrdering order, GVec b, void *ctx)
1257: {
1258: Grid grid;
1259: Mesh mesh;
1260: int comp; /* The number of field components */
1261: int *sizes; /* The number of nodes in each boundary */
1262: int totSize; /* The number of nodes in all boundaries */
1263: int maxSize; /* The maximum number of nodes in any boundary */
1264: int *localStart; /* The offset of this field on a node of a given class */
1265: int node; /* The canonical node number of the current boundary node */
1266: int nclass; /* The class of the current boundary node */
1267: double *x, *y, *z; /* Coordinates of the boundary nodes */
1268: int vars; /* The number of variables affected (var/node * size) */
1269: int *offsets; /* The canonical variable number for the first variable on each node */
1270: int *rows; /* Rows corresponding to boundary nodes */
1271: PetscScalar *values; /* Boundary values */
1272: int size, rank;
1273: int c, i, j, count, off;
1274: #ifdef PETSC_USE_BOPT_g
1275: PetscTruth opt;
1276: #endif
1277: int ierr;
1282: GVecGetGrid(b, &grid);
1283: mesh = grid->mesh;
1284: offsets = order->offsets;
1285: MPI_Comm_rank(grid->comm, &rank);
1287: /* Support for constrained problems */
1288: VecGetSize(b, &size);
1289: if (grid->isConstrained) {
1290: if (size != grid->constraintOrder->numVars) {
1291: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->constraintOrder->numVars);
1292: }
1293: offsets = grid->constraintOrder->offsets;
1294: } else {
1295: if (size != grid->order->numVars) {
1296: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->order->numVars);
1297: }
1298: }
1300: /* Allocate memory */
1301: PetscMalloc(num * sizeof(int), &sizes);
1302: for(c = 0, totSize = 0, maxSize = 0, vars = 0; c < num; c++) {
1303: GridValidField(grid, field[c]);
1304: GridGetBoundarySize(grid, bd[c], field[c], &sizes[c]);
1305: totSize += sizes[c];
1306: maxSize = PetscMax(maxSize, sizes[c]);
1307: vars += sizes[c]*grid->fields[field[c]].disc->comp;
1308: }
1309: if (totSize == 0) {
1310: #ifdef PETSC_USE_BOPT_g
1311: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1312: if (opt == PETSC_TRUE) {
1313: PetscSynchronizedFlush(grid->comm);
1314: PetscSynchronizedFlush(grid->comm);
1315: }
1316: #endif
1317: VecAssemblyBegin(b);
1318: VecAssemblyEnd(b);
1319: return(0);
1320: }
1321: PetscMalloc(maxSize * sizeof(double), &x);
1322: PetscMalloc(maxSize * sizeof(double), &y);
1323: PetscMalloc(maxSize * sizeof(double), &z);
1324: PetscMalloc(vars * sizeof(PetscScalar), &values);
1325: PetscMalloc(vars * sizeof(int), &rows);
1327: /* Loop over boundaries */
1328: for(c = 0, count = 0, off = 0; c < num; c++, off = count) {
1329: if (sizes[c] == 0) {
1330: #ifdef PETSC_USE_BOPT_g
1331: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1332: if (opt == PETSC_TRUE) {
1333: PetscSynchronizedFlush(grid->comm);
1334: PetscSynchronizedFlush(grid->comm);
1335: }
1336: #endif
1337: continue;
1338: }
1339: comp = grid->fields[field[c]].disc->comp;
1340: localStart = order->localStart[field[c]];
1341: /* Loop over boundary nodes */
1342: GridGetBoundaryStart(grid, bd[c], field[c], PETSC_FALSE, &node, &nclass);
1343: for(i = 0; node >= 0; i++) {
1344: for(j = 0; j < comp; j++, count++) {
1345: rows[count] = offsets[node] + j + localStart[nclass];
1346: #ifdef PETSC_USE_BOPT_g
1347: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1348: if (opt == PETSC_TRUE) {
1349: PetscSynchronizedPrintf(grid->comm, "[%d]bd %d field: %d node: %d row: %d class: %d\n",
1350: rank, bd[c], field[c], node, rows[count], nclass);
1351: }
1352: #endif
1353: }
1354: MeshGetNodeCoords(mesh, node, &x[i], &y[i], &z[i]);
1355: GridGetBoundaryNext(grid, bd[c], field[c], PETSC_FALSE, &node, &nclass);
1356: }
1357: #ifdef PETSC_USE_BOPT_g
1358: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1359: if (opt == PETSC_TRUE) {
1360: PetscSynchronizedFlush(grid->comm);
1361: }
1362: #endif
1363: /* Get boundary values */
1364: (*(f[c]))(sizes[c], comp, x, y, z, &values[off], ctx);
1365: #ifdef PETSC_USE_BOPT_g
1366: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1367: if (opt == PETSC_TRUE) {
1368: PetscPrintf(grid->comm, "Setting boundary values in rhs bd %d field %d\n", bd[c], field[c]);
1369: for(i = off; i < count; i++) PetscSynchronizedPrintf(grid->comm, " row: %d val: %g\n", rows[i], PetscRealPart(values[i]));
1370: PetscSynchronizedFlush(grid->comm);
1371: }
1372: #endif
1373: }
1374: if (count != vars) SETERRQ2(PETSC_ERR_PLIB, "Boundary size %d should be %d", count, vars);
1375: /* Put values in Rhs */
1376: VecSetValues(b, vars, rows, values, INSERT_VALUES);
1377: VecAssemblyBegin(b);
1378: VecAssemblyEnd(b);
1380: PetscFree(sizes);
1381: PetscFree(x);
1382: PetscFree(y);
1383: PetscFree(z);
1384: PetscFree(values);
1385: PetscFree(rows);
1386: return(0);
1387: }
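/*
   Usage sketch (editorial addition): the vector analogue of the matrix call
   above, with a separate boundary function per marker. inflowBC is
   hypothetical; zeroBC is the sample function sketched after GridAddBC().

     int           bds[2]    = {1, 2};
     int           fields[2] = {0, 0};
     PointFunction funcs[2]  = {inflowBC, zeroBC};
     ierr = GridSetVecBoundaryRectangular(2, bds, fields, funcs, grid->order, b, userCtx);CHKERRQ(ierr);
*/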
1389: /*@C GridSetVecPointBoundary
1390: This function sets Dirichlet boundary conditions on the linear Rhs arising
1391: from the underlying grid.
1393: Collective on GVec
1395: Input Parameters:
1396: + node - The constrained node
1397: . field - The field to which the conditions apply
1398: . f - The function which defines the boundary condition
1399: - ctx - The user-supplied context
1401: Output Parameter:
1402: . b - The Rhs vector
1404: Level: advanced
1406: .keywords boundary conditions, finite element
1407: .seealso MeshGetBoundaryStart
1408: @*/
1409: int GridSetVecPointBoundary(int node, int field, PointFunction f, GVec b, void *ctx)
1410: {
1411: Grid grid;
1412: int ierr;
1416: GVecGetGrid(b, &grid);
1417: GridSetVecPointBoundaryRectangular(node, field, f, grid->order, b, ctx);
1418: return(0);
1419: }
1421: /*@C GridSetVecPointBoundaryRectangular
1422: This function sets Dirichlet boundary conditions on the linear Rhs arising
1423: from the underlying grid, and the default variable ordering can be overridden.
1425: Collective on GVec
1427: Input Parameters:
1428: + node - The constrained node
1429: . field - The field to which the conditions apply
1430: . f - The function which defines the boundary condition
1431: . order - The test variable ordering
1432: - ctx - The user-supplied context
1434: Output Parameter:
1435: . b - The Rhs vector
1437: Level: advanced
1439: .keywords boundary conditions, finite element
1440: .seealso MeshGetBoundaryStart
1441: @*/
1442: int GridSetVecPointBoundaryRectangular(int node, int field, PointFunction f, VarOrdering order, GVec b, void *ctx)
1443: {
1444: Grid grid;
1445: Mesh mesh;
1446: int comp; /* The number of field components */
1447: int size; /* The number of nodes in the boundary */
1448: int *localStart; /* The offset of this field on a node of a given class */
1449: int nclass; /* The class of the current boundary node */
1450: double x, y, z; /* Coordinates of the boundary nodes */
1451: int *offsets; /* The canonical variable number for the first variable on each node */
1452: int *rows; /* Rows corresponding to boundary nodes */
1453: PetscScalar *values; /* Boundary values */
1454: int rank;
1455: int c;
1456: #ifdef PETSC_USE_BOPT_g
1457: PetscTruth opt;
1458: #endif
1459: int ierr;
1462: if (node < 0) {
1463: VecAssemblyBegin(b);
1464: VecAssemblyEnd(b);
1465: return(0);
1466: }
1469: GVecGetGrid(b, &grid);
1470: GridValidField(grid, field);
1471: MPI_Comm_rank(grid->comm, &rank);
1472: mesh = grid->mesh;
1473: comp = grid->fields[field].disc->comp;
1474: offsets = order->offsets;
1475: localStart = order->localStart[field];
1478: /* Support for constrained problems */
1479: VecGetSize(b, &size);
1480: if (grid->isConstrained) {
1481: if (size != grid->constraintOrder->numVars) {
1482: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->constraintOrder->numVars);
1483: }
1484: offsets = grid->constraintOrder->offsets;
1485: } else {
1486: if (size != grid->order->numVars) {
1487: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->order->numVars);
1488: }
1489: }
1491: /* Allocate memory */
1492: size = 1;
1493: PetscMalloc(comp * sizeof(PetscScalar), &values);
1494: PetscMalloc(comp * sizeof(int), &rows);
1496: MeshGetNodeCoords(mesh, node, &x, &y, &z);
1497: GridGetNodeClass(grid, node, &nclass);
1498: for(c = 0; c < comp; c++) {
1499: rows[c] = offsets[node] + c + localStart[nclass];
1500: #ifdef PETSC_USE_BOPT_g
1501: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1502: if (opt == PETSC_TRUE) {
1503: PetscPrintf(PETSC_COMM_SELF, "[%d]field: %d node: %d row: %d class: %d\n", rank, field, node, rows[c], nclass);
1504: }
1505: #endif
1506: }
1507: /* Get boundary values */
1508: (*f)(size, comp, &x, &y, &z, values, ctx);
1509: /* Put values in Rhs */
1510: #ifdef PETSC_USE_BOPT_g
1511: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1512: if (opt == PETSC_TRUE) {
1513: PetscPrintf(PETSC_COMM_SELF, "Setting boundary values on rhs node %d field %d\n", node, field);
1514: for(c = 0; c < comp; c++) PetscPrintf(PETSC_COMM_SELF, " row: %d val: %g\n", rows[c], PetscRealPart(values[c]));
1515: }
1516: #endif
1517: VecSetValues(b, comp, rows, values, INSERT_VALUES);
1518: VecAssemblyBegin(b);
1519: VecAssemblyEnd(b);
1521: PetscFree(values);
1522: PetscFree(rows);
1523: return(0);
1524: }
1526: /*@C GridSetVecBoundaryDifference
1527: This function sets Dirichlet boundary conditions on the linear Rhs arising
1528: from the underlying grid, but actually sets it to the difference of the
1529: function value and the value in the given vector. This is commonly used in
1530: a time dependent, nonlinear problem for which we would like the rhs boundary
1531: values to be:
1533: U^{n+1}_k - U^{n+1}_{k+1}
1535: where n is the time iteration index, and k is the Newton iteration index. This
1536: means that the solution will be updated to U^{n+1}_{k+1} if the Jacobian is the
1537: identity for that row. This is very useful for time dependent boundary conditions
1538: for which the traditional method of letting the rhs value be zero does not work.
1540: Collective on GVec
1542: Input Parameters:
1543: + bd - The marker for the boundary to apply conditions along
1544: . field - The field to which the conditions apply
1545: . u - A grid vector, usually the previous solution
1546: . f - The function which defines the boundary condition
1547: - ctx - The user-supplied context
1549: Output Parameter:
1550: . b - The Rhs vector
1552: Level: advanced
1554: .keywords boundary conditions, finite element
1555: .seealso MeshGetBoundaryStart
1556: @*/
1557: int GridSetVecBoundaryDifference(int bd, int field, GVec u, PointFunction f, GVec b, void *ctx)
1558: {
1559: Grid grid;
1560: int ierr;
1564: GVecGetGrid(b, &grid);
1565: GridSetVecBoundaryDifferenceRectangular(bd, field, u, f, grid->order, b, ctx);
1566: return(0);
1567: }
1569: /*@C GridSetVecBoundaryDifferenceRectangular
1570: This function sets Dirichlet boundary conditions on the linear Rhs arising
1571: from the underlying grid, but actually sets it to the difference of the
1572: function value and the value in the given vector. This is commonly used in
1573: a time dependent, nonlinear problem for which we would like the rhs boundary
1574: values to be:
1576: U^{n+1}_k - U^{n+1}_{k+1}
1578: where n is the time iteration index, and k is the Newton iteration index. This
1579: means that the solution will be updated to U^{n+1}_{k+1} if the Jacobian is the
1580: identity for that row. This is very useful for time dependent boundary conditions
1581: for which the traditional method of letting the rhs value be zero does not work.
1583: Collective on GVec
1585: Input Parameters:
1586: + bd - The marker for the boundary to apply conditions along
1587: . field - The field to which the conditions apply
1588: . u - A grid vector, usually the previous solution
1589: . f - The function which defines the boundary condition
1590: . order - The test variable ordering
1591: - ctx - The user-supplied context
1593: Output Parameter:
1594: . b - The Rhs vector
1596: Level: advanced
1598: .keywords boundary conditions, finite element
1599: .seealso MeshGetBoundaryStart
1600: @*/
1601: int GridSetVecBoundaryDifferenceRectangular(int bd, int field, GVec u, PointFunction f, VarOrdering order, GVec b, void *ctx)
1602: {
1603: Grid grid;
1604: Mesh mesh;
1605: int comp; /* The number of field components */
1606: int size; /* The number of nodes in the boundary */
1607: int *localStart; /* The offset of this field on a node of a given class */
1608: int node; /* The canonical node number of the current boundary node */
1609: int nclass; /* The class of the current boundary node */
1610: double *x, *y, *z; /* Coordinates of the boundary nodes */
1611: int vars; /* The number of variables affected (var/node * size) */
1612: int *offsets; /* The canonical variable number for the first variable on each node */
1613: int *rows; /* Rows corresponding to boundary nodes */
1614: PetscScalar *values; /* Boundary values */
1615: PetscScalar *uArray; /* The values in the vector u */
1616: int firstVar; /* The canonical number of the first variable in this domain */
1617: int rank;
1618: int i, j, count;
1619: #ifdef PETSC_USE_BOPT_g
1620: PetscTruth opt;
1621: #endif
1622: int ierr;
1627: GVecGetGrid(b, &grid);
1628: GridValidField(grid, field);
1629: MPI_Comm_rank(grid->comm, &rank);
1630: mesh = grid->mesh;
1631: comp = grid->fields[field].disc->comp;
1632: firstVar = order->firstVar[rank];
1633: offsets = order->offsets;
1634: localStart = order->localStart[field];
1636: /* Support for constrained problems */
1637: VecGetSize(b, &size);
1638: if (grid->isConstrained) {
1639: if (size != grid->constraintOrder->numVars) {
1640: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->constraintOrder->numVars);
1641: }
1642: offsets = grid->constraintOrder->offsets;
1643: } else {
1644: if (size != grid->order->numVars) {
1645: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->order->numVars);
1646: }
1647: }
1649: /* Allocate memory */
1650: GridGetBoundarySize(grid, bd, field, &size);
1651: if (size == 0) {
1652: #ifdef PETSC_USE_BOPT_g
1653: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1654: if (opt == PETSC_TRUE) {
1655: PetscSynchronizedFlush(grid->comm);
1656: PetscSynchronizedFlush(grid->comm);
1657: }
1658: #endif
1659: VecAssemblyBegin(b);
1660: VecAssemblyEnd(b);
1661: return(0);
1662: }
1663: vars = size*comp;
1664: PetscMalloc(size * sizeof(double), &x);
1665: PetscMalloc(size * sizeof(double), &y);
1666: PetscMalloc(size * sizeof(double), &z);
1667: PetscMalloc(vars * sizeof(PetscScalar), &values);
1668: PetscMalloc(vars * sizeof(int), &rows);
1670: /* Loop over boundary nodes */
1671: GridGetBoundaryStart(grid, bd, field, PETSC_FALSE, &node, &nclass);
1672: for(i = 0, count = 0; node >= 0; i++) {
1673: for(j = 0; j < comp; j++, count++) {
1674: rows[count] = offsets[node] + j + localStart[nclass];
1675: #ifdef PETSC_USE_BOPT_g
1676: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1677: if (opt == PETSC_TRUE) {
1678: PetscSynchronizedPrintf(grid->comm, "[%d]bd %d field: %d node: %d row: %d class: %d\n",
1679: rank, bd, field, node, rows[count], nclass);
1680: }
1681: #endif
1682: }
1683: MeshGetNodeCoords(mesh, node, &x[i], &y[i], &z[i]);
1684: GridGetBoundaryNext(grid, bd, field, PETSC_FALSE, &node, &nclass);
1685: }
1686: #ifdef PETSC_USE_BOPT_g
1687: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1688: if (opt == PETSC_TRUE) {
1689: PetscSynchronizedFlush(grid->comm);
1690: }
1691: #endif
1692: /* Get boundary values */
1693: (*f)(size, comp, x, y, z, values, ctx);
1694: /* Taking the difference (we know that no values are off-processor) */
1695: VecGetArray(u, &uArray);
1696: for(i = 0; i < vars; i++)
1697: values[i] = uArray[rows[i]-firstVar] - values[i];
1698: VecRestoreArray(u, &uArray);
1699: /* Put values in Rhs */
1700: #ifdef PETSC_USE_BOPT_g
1701: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1702: if (opt == PETSC_TRUE) {
1703: PetscPrintf(grid->comm, "Setting boundary values in rhs bd %d field %d\n", bd, field);
1704: for(i = 0; i < vars; i++) PetscSynchronizedPrintf(grid->comm, " row: %d val: %g\n", rows[i], PetscRealPart(values[i]));
1705: PetscSynchronizedFlush(grid->comm);
1706: }
1707: #endif
1708: VecSetValues(b, vars, rows, values, INSERT_VALUES);
1709: VecAssemblyBegin(b);
1710: VecAssemblyEnd(b);
1712: PetscFree(x);
1713: PetscFree(y);
1714: PetscFree(z);
1715: PetscFree(values);
1716: PetscFree(rows);
1717: return(0);
1718: }
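/*
   Usage sketch (editorial addition): inside a Newton loop for a time dependent
   problem, the boundary rows of the rhs receive u_k - f(x), so a Jacobian row
   that is the identity updates the iterate to the new boundary value, as the
   man page above explains. uCurrent and timeBC are hypothetical names.

     ierr = GridSetVecBoundaryDifference(1, 0, uCurrent, timeBC, b, userCtx);CHKERRQ(ierr);
*/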
1720: /*@C GridSetVecPointBoundaryDifference
1721: This function sets Dirichlet boundary conditions on the linear Rhs arising
1722: from the underlying grid, but actually sets it to the difference of the
1723: function value and the value in the given vector. This is commonly used in
1724: a time dependent, nonlinear problem for which we would like the rhs boundary
1725: values to be:
1727: U^{n+1}_k - U^{n+1}_{k+1}
1729: where n is the time iteration index, and k is the Newton iteration index. This
1730: means that the solution will be updated to U^{n+1}_{k+1} if the Jacobian is the
1731: identity for that row. This is very useful for time dependent boundary conditions
1732: for which the traditional method of letting the rhs value be zero does not work.
1734: Collective on GVec
1736: Input Parameters:
1737: + node - The constrained node
1738: . field - The field to which the conditions apply
1739: . u - A grid vector, usually the previous solution
1740: . f - The function which defines the boundary condition
1741: - ctx - The user-supplied context
1743: Output Parameter:
1744: . b - The Rhs vector
1746: Level: advanced
1748: .keywords boundary conditions, finite element
1749: .seealso MeshGetBoundaryStart
1750: @*/
1751: int GridSetVecPointBoundaryDifference(int node, int field, GVec u, PointFunction f, GVec b, void *ctx)
1752: {
1753: Grid grid;
1754: int ierr;
1758: GVecGetGrid(b, &grid);
1759: GridSetVecPointBoundaryDifferenceRectangular(node, field, u, f, grid->order, b, ctx);
1760: return(0);
1761: }
1763: /*@C GridSetVecPointBoundaryDifferenceRectangular
1764: This function sets Dirichlet boundary conditions on the linear Rhs arising
1765: from the underlying grid, but actually sets it to the difference of the
1766: function value and the value in the given vector. This is commonly used in
1767: a time dependent, nonlinear problem for which we would like the rhs boundary
1768: values to be:
1770: U^{n+1}_k - U^{n+1}_{k+1}
1772: where n is the time iteration index, and k is the Newton iteration index. This
1773: means that the solution will be updated to U^{n+1}_{k+1} if the Jacobian is the
1774: identity for that row. This is very useful for time dependent boundary conditions
1775: for which the traditional method of letting the rhs value be zero does not work.
1777: Collective on GVec
1779: Input Parameters:
1780: + node - The constrained node
1781: . field - The field to which the conditions apply
1782: . u - A grid vector, usually the previous solution
1783: . f - The function which defines the boundary condition
1784: . order - The test variable ordering
1785: - ctx - The user-supplied context
1787: Output Parameter:
1788: . b - The Rhs vector
1790: Level: advanced
1792: .keywords boundary conditions, finite element
1793: .seealso MeshGetBoundaryStart
1794: @*/
1795: int GridSetVecPointBoundaryDifferenceRectangular(int node, int field, GVec u, PointFunction f, VarOrdering order, GVec b, void *ctx)
1796: {
1797: Grid grid;
1798: Mesh mesh;
1799: int comp; /* The number of field components */
1800: int size; /* The number of nodes in the boundary */
1801: int *localStart; /* The offset of this field on a node of a given class */
1802: int nclass; /* The class of the current boundary node */
1803: double x, y, z; /* Coordinates of the boundary nodes */
1804: int *offsets; /* The canonical variable number for the first variable on each node */
1805: int *rows; /* Rows corresponding to boundary nodes */
1806: PetscScalar *values; /* Boundary values */
1807: PetscScalar *uArray; /* The values in the vector u */
1808: int firstVar; /* The canonical number of the first variable in this domain */
1809: int rank;
1810: int i, j;
1811: #ifdef PETSC_USE_BOPT_g
1812: PetscTruth opt;
1813: #endif
1814: int ierr;
1817: if (node < 0) {
1818: VecAssemblyBegin(b);
1819: VecAssemblyEnd(b);
1820: return(0);
1821: }
1823: GVecGetGrid(b, &grid);
1824: GridGetMesh(grid, &mesh);
1825: GridValidField(grid, field);
1826: MPI_Comm_rank(grid->comm, &rank);
1827: comp = grid->fields[field].disc->comp;
1828: firstVar = order->firstVar[rank];
1829: offsets = order->offsets;
1830: localStart = order->localStart[field];
1832: /* Support for constrained problems */
1833: VecGetSize(b, &size);
1834: if (grid->isConstrained) {
1835: if (size != grid->constraintOrder->numVars) {
1836: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->constraintOrder->numVars);
1837: }
1838: offsets = grid->constraintOrder->offsets;
1839: } else {
1840: if (size != grid->order->numVars) {
1841: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid vector size %d should be %d", size, grid->order->numVars);
1842: }
1843: }
1845: /* Allocate memory */
1846: size = 1;
1847: PetscMalloc(comp * sizeof(PetscScalar), &values);
1848: PetscMalloc(comp * sizeof(int), &rows);
1850: MeshGetNodeCoords(mesh, node, &x, &y, &z);
1851: GridGetNodeClass(grid, node, &nclass);
1852: for(j = 0; j < comp; j++) {
1853: rows[j] = offsets[node] + j + localStart[nclass];
1854: #ifdef PETSC_USE_BOPT_g
1855: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1856: if (opt == PETSC_TRUE) {
1857: PetscPrintf(PETSC_COMM_SELF, "[%d]field: %d node: %d row: %d class: %d\n", rank, field, node, rows[j], nclass);
1858: }
1859: #endif
1860: }
1861: /* Get boundary values */
1862: (*f)(size, comp, &x, &y, &z, values, ctx);
1863: /* Taking the difference (we know that no values are off-processor) */
1864: VecGetArray(u, &uArray);
1865: for(i = 0; i < comp; i++) values[i] = uArray[rows[i]-firstVar] - values[i];
1866: VecRestoreArray(u, &uArray);
1867: /* Put values in Rhs */
1868: #ifdef PETSC_USE_BOPT_g
1869: PetscOptionsHasName(PETSC_NULL, "-trace_bc", &opt);
1870: if (opt == PETSC_TRUE) {
1871: PetscPrintf(grid->comm, "Setting boundary values on rhs node %d field %d\n", node, field);
1872: for(i = 0; i < comp; i++) PetscPrintf(PETSC_COMM_SELF, " row: %d val: %g\n", rows[i], PetscRealPart(values[i]));
1873: }
1874: #endif
1875: VecSetValues(b, comp, rows, values, INSERT_VALUES);
1876: VecAssemblyBegin(b);
1877: VecAssemblyEnd(b);
1879: PetscFree(values);
1880: PetscFree(rows);
1881: return(0);
1882: }