Actual source code: grid.c
1: #ifdef PETSC_RCS_HEADER
2: static char vcid[] = "$Id: grid.c,v 1.24 2000/07/16 05:40:26 knepley Exp $";
3: #endif
5: /*
6: This file provides routines for grid vectors (vectors that are associated
7: with grids, possibly with multiple degrees of freedom per node).
 9:   This component is new; changes in the user interface will be occurring in the
10: near future!
11: */
13: #include "petscsles.h" /* For ALE Mesh Support *//*I "petscsles.h" I*/
14: #include "src/grid/gridimpl.h" /*I "grid.h" I*//*I "gvec.h" I*/
16: /* Logging support */
17: int GRID_COOKIE;
18: int GRID_Reform, GRID_SetUp;
20: /*---------------------------------------------- Standard Grid Functions --------------------------------------------*/
21: /*@
22: GridDestroy - Destroys a grid.
24: Collective on Grid
26: Input Parameter:
27: . grid - The grid
29: Level: beginner
 31: .keywords: grid, destroy
32: .seealso: GridView(), GridCreateGVec(), GridSetUp(), GridRefine()
33: @*/
34: int GridDestroy(Grid grid)
35: {
40: if (--grid->refct > 0) return(0);
41: (*grid->ops->destroy)(grid);
42: PetscLogObjectDestroy(grid);
43: PetscHeaderDestroy(grid);
44: return(0);
45: }
47: /*@
48: GridView - Views a grid.
50: Collective on Grid
52: Input Parameters:
53: + grid - The grid
54: - viewer - The viewer, PETSC_VIEWER_STDOUT_WORLD by default
56: Level: beginner
58: .keywords: grid, view
59: .seealso: GVecView()
60: @*/
61: int GridView(Grid grid, PetscViewer viewer)
62: {
67: /* Do not check return from setup, since we may not have a problem yet */
68: if (grid->setupcalled == PETSC_FALSE) GridSetUp(grid);
 69:   if (!viewer) {
 70:     viewer = PETSC_VIEWER_STDOUT_SELF;
 71:   }
74: (*grid->ops->view)(grid, viewer);
75: return(0);
76: }
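/*
   A minimal usage sketch for GridView(): the grid is assumed to have been created
   and configured elsewhere (e.g. with GridCreate()); the function name below is
   illustrative only.
*/
int GridViewExample(Grid grid)
{
  /* Passing PETSC_NULL selects the default viewer */
  GridView(grid, PETSC_NULL);
  /* Any PETSc viewer may be supplied instead, e.g. the world stdout viewer */
  GridView(grid, PETSC_VIEWER_STDOUT_WORLD);
  return(0);
}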
78: EXTERN_C_BEGIN
79: int GridSerialize_Generic(MPI_Comm comm, Grid *grid, PetscViewer viewer, PetscTruth store) {
80: Grid g;
81: Mesh mesh;
82: int fd, hasParent;
83: int one = 1;
84: int zero = 0;
85: int field, len;
86: char *type_name = PETSC_NULL;
87: PetscTruth match;
88: int ierr;
94: PetscTypeCompare((PetscObject) viewer, PETSC_VIEWER_BINARY, &match);
95: if (match == PETSC_FALSE) SETERRQ(PETSC_ERR_ARG_WRONG, "Must be binary viewer");
96: PetscViewerBinaryGetDescriptor(viewer, &fd);
98: if (store) {
99: g = *grid;
101: MeshSerialize(comm, &g->mesh, viewer, store);
102: PetscStrlen(g->type_name, &len);
103: PetscBinaryWrite(fd, &len, 1, PETSC_INT, 0);
104: PetscBinaryWrite(fd, g->type_name, len, PETSC_CHAR, 0);
106: PetscBinaryWrite(fd, &g->numFields, 1, PETSC_INT, 0);
107: for(field = 0; field < g->numFields; field++) {
108: if (g->fields[field].name != PETSC_NULL) {
109: PetscStrlen(g->fields[field].name, &len);
110: } else {
111: len = 0;
112: }
113: PetscBinaryWrite(fd, &len, 1, PETSC_INT, 0);
114: PetscBinaryWrite(fd, g->fields[field].name, len, PETSC_CHAR, 0);
115: PetscBinaryWrite(fd, &g->fields[field].numComp, 1, PETSC_INT, 0);
116: PetscStrlen(g->fields[field].discType, &len);
117: PetscBinaryWrite(fd, &len, 1, PETSC_INT, 0);
118: PetscBinaryWrite(fd, g->fields[field].discType, len, PETSC_CHAR, 0);
119: DiscretizationSerialize(g->comm, &g->fields[field].disc, viewer, store);
120: PetscBinaryWrite(fd, &g->fields[field].isActive, 1, PETSC_INT, 0);
121: PetscBinaryWrite(fd, &g->fields[field].isConstrained, 1, PETSC_INT, 0);
122: PetscBinaryWrite(fd, &g->fields[field].constraintCompDiff, 1, PETSC_INT, 0);
123: }
125: if (g->gridparent != PETSC_NULL) {
126: PetscBinaryWrite(fd, &one, 1, PETSC_INT, 0);
127: GridSerialize(comm, &g->gridparent, viewer, store);
128: } else {
129: PetscBinaryWrite(fd, &zero, 1, PETSC_INT, 0);
130: }
131: PetscBinaryWrite(fd, &g->isConstrained, 1, PETSC_INT, 0);
132: #ifdef NEW_GRID_SERIALIZE
133: /* This stuff is all made in GridSetUp */
134: FieldClassSerialize(g->comm, &g->cm, viewer, store);
135: VarOrderingSerialize(g->cm, &g->order, viewer, store);
136: FieldClassSerialize(g->comm, &g->constraintCM, viewer, store);
137: VarOrderingSerialize(g->constraintCM, &g->constraintOrder, viewer, store);
138: ISSerialize(g->comm, &g->constraintOrdering, viewer, store);
139: MatSerialize(g->comm, &g->constraintMatrix, viewer, store);
140: #endif
142: PetscBinaryWrite(fd, &g->reduceAlpha, 1, PETSC_SCALAR, 0);
143: PetscBinaryWrite(fd, &g->interpolationType, 1, PETSC_INT, 0);
144: PetscBinaryWrite(fd, &g->viewField, 1, PETSC_INT, 0);
145: PetscBinaryWrite(fd, &g->viewComp, 1, PETSC_INT, 0);
146: } else {
147: MeshSerialize(comm, &mesh, viewer, store);
148: GridCreate(mesh, &g);
149: MeshDestroy(mesh);
150: PetscBinaryRead(fd, &len, 1, PETSC_INT);
151: if (len) {
152: PetscMalloc((len+1) * sizeof(char), &type_name);
153: type_name[len] = 0;
154: }
155: PetscBinaryRead(fd, type_name, len, PETSC_CHAR);
156: PetscBinaryRead(fd, &g->numFields, 1, PETSC_INT);
157: g->maxFields = g->numFields;
158: PetscFree(g->fields);
159: PetscMalloc(g->numFields * sizeof(Field), &g->fields);
160: for(field = 0; field < g->numFields; field++) {
161: PetscBinaryRead(fd, &len, 1, PETSC_INT);
162: if (len) {
163: PetscMalloc((len+1) * sizeof(char), &g->fields[field].name);
164: g->fields[field].name[len] = 0;
165: }
166: PetscBinaryRead(fd, g->fields[field].name, len, PETSC_CHAR);
167: PetscBinaryRead(fd, &g->fields[field].numComp, 1, PETSC_INT);
168: PetscBinaryRead(fd, &len, 1, PETSC_INT);
169: PetscMalloc((len+1) * sizeof(DiscretizationType), &g->fields[field].discType);
170: g->fields[field].discType[len] = 0;
171: PetscBinaryRead(fd, g->fields[field].discType, len, PETSC_CHAR);
172: DiscretizationSerialize(g->comm, &g->fields[field].disc, viewer, store);
173: PetscTypeCompare((PetscObject) g->fields[field].disc, g->fields[field].discType, &match);
174: if (match == PETSC_FALSE) SETERRQ(PETSC_ERR_ARG_CORRUPT, "Incorrect serialization of Discretization");
175: PetscBinaryRead(fd, &g->fields[field].isActive, 1, PETSC_INT);
176: PetscBinaryRead(fd, &g->fields[field].isConstrained, 1, PETSC_INT);
177: PetscBinaryRead(fd, &g->fields[field].constraintCompDiff, 1, PETSC_INT);
178: }
179: GridSetType(g, type_name);
180: if (len) {
181: PetscFree(type_name);
182: }
184: PetscBinaryRead(fd, &hasParent, 1, PETSC_INT);
185: if (hasParent) {
186: GridSerialize(comm, &g->gridparent, viewer, store);
187: }
188: PetscBinaryRead(fd, &g->isConstrained, 1, PETSC_INT);
189: #ifdef NEW_GRID_SERIALIZE
190: /* This stuff is all made in GridSetUp */
191: FieldClassSerialize(g->comm, &g->cm, viewer, store);
192: VarOrderingSerialize(g->cm, &g->order, viewer, store);
193: FieldClassSerialize(g->comm, &g->constraintCM, viewer, store);
194: VarOrderingSerialize(g->constraintCM, &g->constraintOrder, viewer, store);
195: ISSerialize(g->comm, &g->constraintOrdering, viewer, store);
196: MatSerialize(g->comm, &g->constraintMatrix, viewer, store);
197: #endif
199: PetscBinaryRead(fd, &g->reduceAlpha, 1, PETSC_SCALAR);
200: PetscBinaryRead(fd, &g->interpolationType, 1, PETSC_INT);
201: PetscBinaryRead(fd, &g->viewField, 1, PETSC_INT);
202: PetscBinaryRead(fd, &g->viewComp, 1, PETSC_INT);
204: *grid = g;
205: }
207: return(0);
208: }
209: EXTERN_C_END
211: /*@
212: GridSetUp - Set up any required internal data structures for the grid.
214: Collective on Grid
216: Input Parameter:
217: . grid - The grid
219: Level: intermediate
221: .keywords: grid, setup
222: .seealso: GridDestroy(), GridCreateGVec()
223: @*/
224: int GridSetUp(Grid grid)
225: {
226: LocalVarOrdering reduceLocOrder;
227: LocalVarOrdering locOrder;
228: int sField, tField;
229: int bd, bc, op;
230: int ierr;
234: if (grid->setupcalled == PETSC_TRUE) return(0);
235: grid->setupcalled = PETSC_TRUE;
237: PetscLogEventBegin(GRID_SetUp, grid, 0, 0, 0);
238: if (grid->numFields <= 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE, "At least one field must be defined on the Grid");
239: /* Setup boundary sizes */
240: for(bd = 0; bd < grid->numBd; bd++) {
241: if (grid->bdSize[bd] != PETSC_NULL) {
242: PetscFree(grid->bdSize[bd]);
243: }
244: PetscMalloc(grid->numFields * sizeof(int), &grid->bdSize[bd]);
245: PetscLogObjectMemory(grid, grid->numFields * sizeof(int));
246: }
247: /* Setup type-specific stuff */
248: if (grid->ops->gridsetup) {
 249:     ierr = (*grid->ops->gridsetup)(grid);
 250:     if (ierr != 0) PetscFunctionReturn(ierr);
251: }
252: /* Setup constraint structures */
253: (*grid->ops->gridsetupconstraints)(grid, grid->constraintCtx);
254: /* Setup ghost variable structures */
255: GridSetupGhostScatter(grid);
256: /* Setup boundary conditions reductions */
257: if (grid->reduceSystem == PETSC_TRUE) {
258: /* Create storage for reduction of Rhs */
259: if (grid->bdReduceVec != PETSC_NULL) {
260: GVecDestroy(grid->bdReduceVec);
261: }
262: GVecCreateRectangularGhost(grid, grid->reduceOrder, &grid->bdReduceVec);
263: grid->bdReduceVecCur = grid->bdReduceVec;
265: /* Hopefully, the user specified the right context by now */
266: GridCalcBCValues(grid, PETSC_FALSE, grid->reduceContext);
268: if (grid->reduceElement == PETSC_FALSE) {
269: /* Create storage for explicit reduction of Rhs */
270: GVecCreate(grid, &grid->reduceVec);
271: GMatCreateRectangular(grid, grid->reduceOrder, grid->order, &grid->bdReduceMat);
272: MatSetOption(grid->bdReduceMat, MAT_NEW_NONZERO_ALLOCATION_ERR);
274: /* Initialize storage */
275: MatZeroEntries(grid->bdReduceMat);
277: /* Evaluate the elimination block */
278: for(bc = 0; bc < grid->numBC; bc++) {
279: for(op = 0; op < grid->numMatOps; op++) {
280: sField = grid->matOps[op].field;
281: tField = grid->fields[sField].disc->operators[grid->matOps[op].op]->test->field;
282: if (sField == grid->bc[bc].field) {
283: LocalVarOrderingCreate(grid, 1, &tField, &locOrder);
284: LocalVarOrderingCreate(grid, 1, &sField, &reduceLocOrder);
285: GMatEvaluateOperatorGalerkin(grid->bdReduceMat, PETSC_NULL, 1, &sField, &tField, grid->matOps[op].op,
286: grid->matOps[op].alpha, MAT_FLUSH_ASSEMBLY, grid->reduceContext);
287:
288: LocalVarOrderingDestroy(locOrder);
289: LocalVarOrderingDestroy(reduceLocOrder);
290: }
293: }
294: }
295: for(bc = 0; bc < grid->numPointBC; bc++) {
296: for(op = 0; op < grid->numMatOps; op++) {
297: sField = grid->matOps[op].field;
298: tField = grid->fields[sField].disc->operators[grid->matOps[op].op]->test->field;
299: if (sField == grid->pointBC[bc].field) {
300: LocalVarOrderingCreate(grid, 1, &tField, &locOrder);
301: LocalVarOrderingCreate(grid, 1, &sField, &reduceLocOrder);
302: GMatEvaluateOperatorGalerkin(grid->bdReduceMat, PETSC_NULL, 1, &sField, &tField, grid->matOps[op].op,
303: grid->matOps[op].alpha, MAT_FLUSH_ASSEMBLY, grid->reduceContext);
304:
305: LocalVarOrderingDestroy(locOrder);
306: LocalVarOrderingDestroy(reduceLocOrder);
307: }
310: }
311: }
312: MatAssemblyBegin(grid->bdReduceMat, MAT_FINAL_ASSEMBLY);
313: MatAssemblyEnd(grid->bdReduceMat, MAT_FINAL_ASSEMBLY);
314: }
315: }
316: PetscLogEventEnd(GRID_SetUp, grid, 0, 0, 0);
317: return(0);
318: }
320: /*@
321: GridSetupGhostScatter - Set up the scatter from a global vector to local values, including ghosts.
323: Collective on Grid
325: Input Parameter:
326: . grid - The grid
328: Level: advanced
330: .keywords: grid, ghost, scatter
331: .seealso: GridDestroy(), GridCreateGVec()
332: @*/
333: int GridSetupGhostScatter(Grid grid)
334: {
335: PetscTruth isConstrained, explicitConstraints;
336: int ierr;
340: GridSetUp(grid);
341: /* Destroy old structures */
342: if (grid->ghostVec != PETSC_NULL) {
343: VecDestroy(grid->ghostVec);
344: }
345: if (grid->ghostScatter != PETSC_NULL) {
346: VecScatterDestroy(grid->ghostScatter);
347: }
348: /* Setup scatter from global vector to local ghosted vector */
349: GridIsConstrained(grid, &isConstrained);
350: GridGetExplicitConstraints(grid, &explicitConstraints);
351: if ((isConstrained == PETSC_TRUE) && (explicitConstraints == PETSC_TRUE)) {
352: (*grid->ops->gridsetupghostscatter)(grid, grid->constraintOrder, &grid->ghostVec, &grid->ghostScatter);
353:
354: } else {
355: (*grid->ops->gridsetupghostscatter)(grid, grid->order, &grid->ghostVec, &grid->ghostScatter);
356: }
357: return(0);
358: }
360: /*@
361: GridSetupBoundary - Set up any required internal data structures for the boundary.
363: Collective on Grid
365: Input Parameter:
366: . grid - The grid
368: Level: advanced
370: .keywords: grid, setup, boundary
371: .seealso: GridDestroy(), GridCreateGVec()
372: @*/
373: int GridSetupBoundary(Grid grid)
374: {
379: if (grid->bdSetupCalled == PETSC_TRUE) return(0);
380: GridSetUp(grid);
381: if (grid->ops->gridsetupboundary) {
382: (*grid->ops->gridsetupboundary)(grid);
383: }
384: return(0);
385: }
387: /*@
388: GridGetMesh - Returns the context for the underlying mesh.
390: Not collective
392: Input Parameter:
393: . grid - The initial grid
395: Output Parameter:
396: . mesh - The mesh
398: Level: intermediate
400: .keywords: grid, mesh
401: .seealso: GridDestroy()
402: @*/
403: int GridGetMesh(Grid grid, Mesh *mesh)
404: {
408: *mesh = grid->mesh;
409: return(0);
410: }
412: /*@
413: GridGetClassMap - This function returns the default field class map for the grid.
415: Not collective
417: Input Parameter:
418: . grid - The grid
420: Output Parameter:
 421: . map - The default field class map
423: Level: intermediate
425: .keywords: Grid, get, class map
426: .seealso: GridGetMesh(), GridGetOrder(), FieldClassMapCreate()
427: @*/
428: int GridGetClassMap(Grid grid, FieldClassMap *map)
429: {
433: if (grid->isConstrained == PETSC_TRUE) {
434: *map = grid->constraintCM;
435: } else {
436: *map = grid->cm;
437: }
438: return(0);
439: }
441: /*@
442: GridGetOrder - This function returns the default ordering for the grid.
444: Not collective
446: Input Parameter:
447: . grid - The grid
449: Output Parameter:
450: . order - The default variable ordering
452: Level: intermediate
454: .keywords: Grid, get, order
455: .seealso: GridGetLocalOrder(), VarOrderingCreate()
456: @*/
457: int GridGetOrder(Grid grid, VarOrdering *order)
458: {
462: if (grid->isConstrained == PETSC_TRUE) {
463: *order = grid->constraintOrder;
464: } else {
465: *order = grid->order;
466: }
467: return(0);
468: }
470: /*@
471: GridGetLocalOrder - This function returns the local ordering for the grid.
473: Not collective
475: Input Parameter:
476: . grid - The grid
478: Output Parameter:
479: . order - The local variable ordering
481: Level: intermediate
483: .keywords: Grid, get, local, order
484: .seealso: GridGetOrder(), LocalVarOrderingCreate()
485: @*/
486: int GridGetLocalOrder(Grid grid, LocalVarOrdering *order)
487: {
491: *order = grid->locOrder;
492: return(0);
493: }
495: /*@
496: GridGetDiscretization - This function returns the discretization for the given field.
498: Not collective
500: Input Parameters:
501: + grid - The grid
502: - field - The field
504: Output Parameter:
505: . disc - The Discretization
507: Level: intermediate
509: .keywords: Grid, get, Discretization, field
510: .seealso: GridGetOrder(), DiscretizationCreate()
511: @*/
512: int GridGetDiscretization(Grid grid, int field, Discretization *disc) {
516: GridValidField(grid, field);
517: *disc = grid->fields[field].disc;
518: return(0);
519: }
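/*
   A short sketch of the accessor routines above: retrieving the mesh, the default
   class map and ordering, and the discretization of field 0 from an existing grid.
   Field number 0 and the function name are illustrative choices.
*/
int GridQueryExample(Grid grid)
{
  Mesh           mesh;
  FieldClassMap  map;
  VarOrdering    order;
  Discretization disc;

  GridGetMesh(grid, &mesh);
  GridGetClassMap(grid, &map);
  GridGetOrder(grid, &order);
  GridGetDiscretization(grid, 0, &disc);
  return(0);
}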
521: /*
522: GridSetTypeFromOptions - Sets the type of grid from user options.
524: Collective on Grid
526: Input Parameter:
527: . grid - The grid
529: Level: intermediate
531: .keywords: Grid, set, options, database, type
532: .seealso: GridSetFromOptions(), GridSetType()
533: */
534: static int GridSetTypeFromOptions(Grid grid)
535: {
536: PetscTruth opt;
537: char *defaultType;
538: char typeName[256];
539: int ierr;
542: if (grid->type_name != PETSC_NULL) {
543: defaultType = grid->type_name;
544: } else {
545: defaultType = GRID_TRIANGULAR_2D;
546: }
548: if (GridRegisterAllCalled == PETSC_FALSE) {
549: GridRegisterAll(PETSC_NULL);
550: }
 551:   PetscOptionsList("-grid_type", "Grid method", "GridSetType", GridList, defaultType, typeName, 256, &opt);
552: if (opt == PETSC_TRUE) {
553: GridSetType(grid, typeName);
554: } else {
555: GridSetType(grid, defaultType);
556: }
557: return(0);
558: }
560: /*
561: GridSetSerializeTypeFromOptions - Sets the type of grid serialization from user options.
563: Collective on Grid
565: Input Parameter:
566: . grid - The grid
568: Level: intermediate
570: .keywords: Grid, set, options, database, type
571: .seealso: GridSetFromOptions(), GridSetSerializeType()
572: */
573: static int GridSetSerializeTypeFromOptions(Grid grid)
574: {
575: PetscTruth opt;
576: char *defaultType;
577: char typeName[256];
578: int ierr;
581: if (grid->serialize_name != PETSC_NULL) {
582: defaultType = grid->serialize_name;
583: } else {
584: defaultType = GRID_SER_GENERIC;
585: }
587: if (GridSerializeRegisterAllCalled == PETSC_FALSE) {
588: GridSerializeRegisterAll(PETSC_NULL);
589: }
 590:   PetscOptionsList("-grid_serialize_type", "Grid serialization method", "GridSetSerializeType", GridSerializeList,
591: defaultType, typeName, 256, &opt);
592: if (opt == PETSC_TRUE) {
593: GridSetSerializeType(grid, typeName);
594: } else {
595: GridSetSerializeType(grid, defaultType);
596: }
597: return(0);
598: }
600: /*@
601: GridSetFromOptions - Sets various Grid parameters from user options.
603: Collective on Grid
605: Input Parameter:
606: . grid - The grid
608: Notes: To see all options, run your program with the -help option, or consult the users manual.
609: Must be called after GridCreate() but before the Grid is used.
611: Level: intermediate
613: .keywords: Grid, set, options, database
614: .seealso: GridCreate(), GridPrintHelp(), GridSetOptionsPrefix()
615: @*/
616: int GridSetFromOptions(Grid grid)
617: {
618: Mesh mesh;
619: char *intNames[NUM_INTERPOLATIONS] = {"Local", "Halo", "L2", "L2Local"};
620: char intType[256];
621: PetscTruth opt;
622: int ierr;
626: PetscOptionsBegin(grid->comm, grid->prefix, "Grid options", "Grid");
628: /* Handle generic grid options */
629: PetscOptionsHasName(PETSC_NULL, "-help", &opt);
630: if (opt == PETSC_TRUE) {
631: GridPrintHelp(grid);
632: }
634: /* Constraint options */
635: PetscOptionsName("-grid_explicit_constraints", "Explicitly form constrained system", "GridGetExplicitConstraints", &grid->explicitConstraints);
637: /* Interpolation options */
638: PetscOptionsEList("-grid_int_type", "Interpolation Method", "None", intNames, NUM_INTERPOLATIONS,
639: intNames[grid->interpolationType], intType, 256, &opt);
640: if (opt == PETSC_TRUE) {
641: PetscTruth islocal, ishalo, isl2, isl2local;
643: PetscStrcasecmp(intType, intNames[INTERPOLATION_LOCAL], &islocal);
644: PetscStrcasecmp(intType, intNames[INTERPOLATION_HALO], &ishalo);
645: PetscStrcasecmp(intType, intNames[INTERPOLATION_L2], &isl2);
646: PetscStrcasecmp(intType, intNames[INTERPOLATION_L2_LOCAL], &isl2local);
647: if (islocal == PETSC_TRUE) {
648: grid->interpolationType = INTERPOLATION_LOCAL;
649: } else if (ishalo == PETSC_TRUE) {
650: grid->interpolationType = INTERPOLATION_HALO;
651: } else if (isl2 == PETSC_TRUE) {
652: grid->interpolationType = INTERPOLATION_L2;
653: } else if (isl2local == PETSC_TRUE) {
654: grid->interpolationType = INTERPOLATION_L2_LOCAL;
655: }
656: }
658: /* Graphics options */
659: PetscOptionsInt("-grid_view_field", "View Field", "None", grid->viewField, &grid->viewField, &opt);
660: PetscOptionsInt("-grid_view_comp", "View Component", "None", grid->viewComp, &grid->viewComp, &opt);
662: /* Handle grid type options */
663: GridSetTypeFromOptions(grid);
665: /* Handle grid serialization options */
666: GridSetSerializeTypeFromOptions(grid);
668: /* Handle specific grid options */
669: if (grid->ops->setfromoptions != PETSC_NULL) {
670: (*grid->ops->setfromoptions)(grid);
671: }
673: PetscOptionsEnd();
675: /* Handle subobject options */
676: GridGetMesh(grid, &mesh);
677: MeshSetFromOptions(mesh);
679: GridViewFromOptions(grid, grid->name);
680: return(0);
681: }
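/*
   A typical configuration sequence, sketched under the assumption that a Mesh has
   already been built elsewhere: create the grid from the mesh, let the options
   database choose the type and parameters, set it up, use it, and destroy it.
*/
int GridConfigureExample(Mesh mesh)
{
  Grid grid;

  GridCreate(mesh, &grid);
  /* At least one field must be added (e.g. with GridAddField()) before GridSetUp() */
  GridSetFromOptions(grid);   /* e.g. -grid_type <typename> -grid_view */
  GridSetUp(grid);
  /* ... create grid vectors and matrices and assemble the problem here ... */
  GridDestroy(grid);
  return(0);
}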
683: /*@
684: GridViewFromOptions - This function visualizes the grid based upon user options.
686: Collective on Grid
688: Input Parameter:
689: . grid - The grid
691: Level: intermediate
693: .keywords: Grid, view, options, database
694: .seealso: GridSetFromOptions(), GridView()
695: @*/
696: int GridViewFromOptions(Grid grid, char *title)
697: {
698: PetscViewer viewer;
699: PetscDraw draw;
700: PetscTruth opt;
701: char *titleStr;
702: char typeName[1024];
703: char fileName[1024];
704: int len;
705: int ierr;
708: PetscOptionsHasName(grid->prefix, "-grid_view", &opt);
709: if (opt == PETSC_TRUE) {
710: PetscOptionsGetString(grid->prefix, "-grid_view", typeName, 1024, &opt);
711: PetscStrlen(typeName, &len);
712: if (len > 0) {
713: PetscViewerCreate(grid->comm, &viewer);
714: PetscViewerSetType(viewer, typeName);
715: PetscOptionsGetString(grid->prefix, "-grid_view_file", fileName, 1024, &opt);
716: if (opt == PETSC_TRUE) {
717: PetscViewerSetFilename(viewer, fileName);
718: } else {
719: PetscViewerSetFilename(viewer, grid->name);
720: }
721: GridView(grid, viewer);
722: PetscViewerFlush(viewer);
723: PetscViewerDestroy(viewer);
724: } else {
725: GridView(grid, PETSC_NULL);
726: }
727: }
728: PetscOptionsHasName(grid->prefix, "-grid_view_draw", &opt);
729: if (opt == PETSC_TRUE) {
730: PetscViewerDrawOpen(grid->comm, 0, 0, 0, 0, 300, 300, &viewer);
731: PetscViewerDrawGetDraw(viewer, 0, &draw);
732: if (title != PETSC_NULL) {
733: titleStr = title;
734: } else {
 735:       PetscObjectName((PetscObject) grid);
736: titleStr = grid->name;
737: }
738: PetscDrawSetTitle(draw, titleStr);
739: GridView(grid, viewer);
740: PetscViewerFlush(viewer);
741: PetscDrawPause(draw);
742: PetscViewerDestroy(viewer);
743: }
744: return(0);
745: }
747: /*@
748: GridPrintHelp - Prints all options for the grid.
750: Input Parameter:
751: . grid - The grid
753: Options Database Keys:
754: $ -help, -h
756: Level: intermediate
758: .keywords: Grid, help
759: .seealso: GridSetFromOptions()
760: @*/
761: int GridPrintHelp(Grid grid)
762: {
763: char p[64];
764: int ierr;
769: PetscStrcpy(p, "-");
770: if (grid->prefix != PETSC_NULL) {
771: PetscStrcat(p, grid->prefix);
772: }
 774:   (*PetscHelpPrintf)(grid->comm, "Grid options ------------------------------------------------\n");
 775:   (*PetscHelpPrintf)(grid->comm,"  %sgrid_type <typename> : Sets the grid type\n", p);
 776:   (*PetscHelpPrintf)(grid->comm,"  %sgrid_view_field <field>: Sets the field to visualize\n", p);
 777:   (*PetscHelpPrintf)(grid->comm,"  %sgrid_view_comp <comp>: Sets the component to visualize\n", p);
778: return(0);
779: }
781: /*@C
782: GridSetOptionsPrefix - Sets the prefix used for searching for all grid options in the database.
784: Input Parameters:
785: + grid - The grid
786: - prefix - The prefix to prepend to all option names
788: Notes:
789: A hyphen (-) must NOT be given at the beginning of the prefix name.
790: The first character of all runtime options is AUTOMATICALLY the hyphen.
792: Level: intermediate
794: .keywords: grid, set, options, prefix, database
795: .seealso: GridSetFromOptions()
796: @*/
797: int GridSetOptionsPrefix(Grid grid, char *prefix)
798: {
803: PetscObjectSetOptionsPrefix((PetscObject) grid, prefix);
804: return(0);
805: }
807: /*@C
808: GridAppendOptionsPrefix - Appends to the prefix used for searching for all grid options in the database.
810: Collective on Grid
812: Input Parameters:
813: + grid - The grid
814: - prefix - The prefix to prepend to all option names
816: Notes:
817: A hyphen (-) must NOT be given at the beginning of the prefix name.
818: The first character of all runtime options is AUTOMATICALLY the hyphen.
820: Level: intermediate
822: .keywords: grid, append, options, prefix, database
823: .seealso: GridGetOptionsPrefix()
824: @*/
825: int GridAppendOptionsPrefix(Grid grid, char *prefix)
826: {
828:
831: PetscObjectAppendOptionsPrefix((PetscObject) grid, prefix);
832: return(0);
833: }
835: /*@
836: GridGetOptionsPrefix - Sets the prefix used for searching for all grid options in the database.
838: Input Parameter:
839: . grid - The grid
841: Output Parameter:
842: . prefix - A pointer to the prefix string used
844: Level: intermediate
846: .keywords: grid, get, options, prefix, database
847: .seealso: GridAppendOptionsPrefix()
848: @*/
849: int GridGetOptionsPrefix(Grid grid, char **prefix)
850: {
855: PetscObjectGetOptionsPrefix((PetscObject) grid, prefix);
856: return(0);
857: }
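/*
   A small sketch of option prefixes: after setting the prefix "my_", the grid reads
   its options as -my_grid_type, -my_grid_view, and so on.  The prefix strings used
   here are examples only.
*/
int GridPrefixExample(Grid grid)
{
  GridSetOptionsPrefix(grid, "my_");
  GridAppendOptionsPrefix(grid, "fine_");   /* options now start with -my_fine_grid_... */
  GridSetFromOptions(grid);
  return(0);
}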
859: /*----------------------------------------------- Graphics Functions ------------------------------------------------*/
 860: /*@C
 861:   GridSetViewField - This function sets the field component to be used in visualization.
863: Collective on Grid
865: Input Parameters:
866: + grid - The grid
867: . field - The field
868: - comp - The component
870: Level: intermediate
 872: .keywords: grid, view, field
 873: .seealso: GridGetMeshInfo
874: @*/
875: int GridSetViewField(Grid grid, int field, int comp)
876: {
879: GridValidField(grid, field);
880: if ((comp < 0) || (comp >= grid->fields[field].numComp)) {
881: SETERRQ2(PETSC_ERR_ARG_WRONG, "Invalid field component %d should be in [0,%d)", comp, grid->fields[field].numComp);
882: }
883: grid->viewField = field;
884: grid->viewComp = comp;
885: return(0);
886: }
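/*
   Sketch of the visualization path: select the field and component to draw, then
   view the grid with a draw viewer, much as GridViewFromOptions() does for the
   -grid_view_draw option.  Field 0, component 0, and the window size are
   illustrative values.
*/
int GridDrawExample(Grid grid)
{
  PetscViewer viewer;

  GridSetViewField(grid, 0, 0);
  PetscViewerDrawOpen(grid->comm, 0, 0, 0, 0, 300, 300, &viewer);
  GridView(grid, viewer);
  PetscViewerFlush(viewer);
  PetscViewerDestroy(viewer);
  return(0);
}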
888: /*------------------------------------------ Interface to Mesh Functions --------------------------------------------*/
889: /*@C
890: GridGetNodeClass - This function returns the class of a given node
892: Not collective
894: Input Parameters:
895: + grid - The grid
896: - node - The node
898: Output Parameter:
899: . nclass - The node class
901: Level: intermediate
 903: .keywords: grid, mesh, node, class
 904: .seealso: GridGetMeshInfo
905: @*/
906: int GridGetNodeClass(Grid grid, int node, int *nclass)
907: {
908: FieldClassMap map;
909: int ierr;
914: GridGetClassMap(grid, &map);
915: FieldClassMapGetNodeClass(map, node, nclass);
916: return(0);
917: }
919: /*@
920: GridGetNearestNode - This function returns the node nearest to the given point on which
921: the field is defined, or -1 if the node is not contained in the local mesh.
923: Not collective
925: Input Parameters:
 926: + grid - The grid
927: . field - The field
928: - x,y,z - The node coordinates
930: Output Parameter:
931: . node - The nearest node
933: Level: beginner
935: .keywords: grid, node, point location
 936: .seealso: MeshNearestNode(), MeshLocatePoint()
937: @*/
938: int GridGetNearestNode(Grid grid, int field, double x, double y, double z, int *node)
939: {
940: FieldClassMap map;
941: Mesh mesh;
942: Partition p;
943: PetscTruth defined;
944: double minDist, dist, nx, ny, nz;
945: int minNode, globalMinNode, allMinNode;
946: int numCorners, startNode, endNode;
947: int elem, corner, nearNode, nclass;
948: int ierr;
953: FieldClassMapCreateTriangular2D(grid, 1, &field, &map);
954: GridGetMesh(grid, &mesh);
955: MeshGetNumCorners(mesh, &numCorners);
956: MeshGetPartition(mesh, &p);
957: PartitionGetStartNode(p, &startNode);
958: PartitionGetEndNode(p, &endNode);
959: MeshLocatePoint(mesh, x, y, z, &elem);
960: if (elem >= 0) {
961: /* Find first node with field defined */
962: for(corner = 0, minDist = PETSC_MAX; corner < numCorners; corner++) {
963: MeshGetNodeFromElement(mesh, elem, corner, &minNode);
964: FieldClassMapGetNodeClass(map, minNode, &nclass);
965: FieldClassMapIsDefined(map, field, nclass, &defined);
966: if (defined == PETSC_TRUE) {
967: MeshGetNodeCoords(mesh, minNode, &nx, &ny, &nz);
968: minDist = PetscSqr(MeshPeriodicDiffX(mesh, nx - x)) + PetscSqr(MeshPeriodicDiffY(mesh, ny - y));
969: break;
970: }
971: }
972: if (corner == numCorners) SETERRQ2(PETSC_ERR_ARG_WRONG, "Element %d has no node with field %d defined", elem, field);
973: /* Find closest node with field defined */
974: for(corner = 1; corner < numCorners; corner++) {
975: MeshGetNodeFromElement(mesh, elem, corner, &nearNode);
976: MeshGetNodeCoords(mesh, nearNode, &nx, &ny, &nz);
977: FieldClassMapGetNodeClass(map, nearNode, &nclass);
978: FieldClassMapIsDefined(map, field, nclass, &defined);
979: dist = PetscSqr(nx - x) + PetscSqr(ny - y);
980: if ((defined == PETSC_TRUE) && (dist < minDist)) {
981: minDist = dist;
982: minNode = nearNode;
983: }
984: }
985: PartitionLocalToGlobalNodeIndex(p, minNode, &globalMinNode);
986: } else {
987: minNode = -1;
988: globalMinNode = -1;
989: }
990: FieldClassMapDestroy(map);
992: /* The minimum node might still be a ghost node */
993: MPI_Allreduce(&globalMinNode, &allMinNode, 1, MPI_INT, MPI_MAX, grid->comm);
994: if ((allMinNode >= startNode) && (allMinNode < endNode)) {
995: *node = allMinNode - startNode;
996: } else {
997: *node = -1;
998: }
999: if (allMinNode < 0) PetscFunctionReturn(1);
1000: return(0);
1001: }
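/*
   A small sketch of point location with GridGetNearestNode(): the call is made on
   every process, and a returned node number of -1 means the nearest node is owned
   by another process.  Field 0 and the coordinates are illustrative values.
*/
int GridNearestNodeExample(Grid grid)
{
  int node;

  GridGetNearestNode(grid, 0, 0.5, 0.5, 0.0, &node);
  if (node >= 0) {
    PetscPrintf(PETSC_COMM_SELF, "Nearest node to (0.5, 0.5) is local node %d\n", node);
  }
  return(0);
}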
1003: /*@
1004: GridGetNearestBdNode - This function returns the boundary node nearest to the given point
1005: on which the field is defined, or -1 if the node is not contained in the local mesh.
1007: Not collective
1009: Input Parameters:
 1010: + grid - The grid
1011: . field - The field
1012: - x,y,z - The node coordinates
1014: Output Parameter:
1015: . node - The nearest boundary node
1017: Level: beginner
1019: .keywords: grid, node, point location
 1020: .seealso: MeshNearestBdNode(), MeshLocatePoint()
1021: @*/
1022: int GridGetNearestBdNode(Grid grid, int field, double x, double y, double z, int *node)
1023: {
1024: FieldClassMap map;
1025: Mesh mesh;
1026: Partition p;
1027: PetscTruth defined;
1028: double minDist, dist, nx, ny, nz;
1029: int minNode, globalMinNode, allMinNode;
1030: int numCorners, startNode, endNode;
1031: int elem, corner, nearNode, nclass, bd;
1032: int ierr;
1037: FieldClassMapCreateTriangular2D(grid, 1, &field, &map);
1038: GridGetMesh(grid, &mesh);
1039: MeshGetNumCorners(mesh, &numCorners);
1040: MeshGetPartition(mesh, &p);
1041: PartitionGetStartNode(p, &startNode);
1042: PartitionGetEndNode(p, &endNode);
1043: MeshLocatePoint(mesh, x, y, z, &elem);
1044: if (elem >= 0) {
1045: /* Find first node with field defined */
1046: for(corner = 0, minDist = PETSC_MAX; corner < numCorners; corner++) {
1047: MeshGetNodeFromElement(mesh, elem, corner, &minNode);
1048: MeshGetNodeBoundary(mesh, minNode, &bd);
1049: FieldClassMapGetNodeClass(map, minNode, &nclass);
1050: FieldClassMapIsDefined(map, field, nclass, &defined);
1051: if ((bd != 0) && (defined == PETSC_TRUE)) {
1052: MeshGetNodeCoords(mesh, minNode, &nx, &ny, &nz);
1053: minDist = PetscSqr(MeshPeriodicDiffX(mesh, nx - x)) + PetscSqr(MeshPeriodicDiffY(mesh, ny - y));
1054: break;
1055: }
1056: }
1057: if (corner == numCorners) SETERRQ2(PETSC_ERR_ARG_WRONG, "Element %d has no node with field %d defined", elem, field);
1058: /* Find closest node with field defined */
1059: for(corner = 1; corner < numCorners; corner++) {
1060: MeshGetNodeFromElement(mesh, elem, corner, &nearNode);
1061: MeshGetNodeCoords(mesh, nearNode, &nx, &ny, &nz);
1062: MeshGetNodeBoundary(mesh, nearNode, &bd);
1063: FieldClassMapGetNodeClass(map, nearNode, &nclass);
1064: FieldClassMapIsDefined(map, field, nclass, &defined);
1065: dist = PetscSqr(nx - x) + PetscSqr(ny - y);
1066: if ((bd != 0) && (defined == PETSC_TRUE) && (dist < minDist)) {
1067: minDist = dist;
1068: minNode = nearNode;
1069: }
1070: }
1071: PartitionLocalToGlobalNodeIndex(p, minNode, &globalMinNode);
1072: } else {
1073: minNode = -1;
1074: globalMinNode = -1;
1075: }
1076: FieldClassMapDestroy(map);
1078: /* The minimum node might still be a ghost node */
1079: MPI_Allreduce(&globalMinNode, &allMinNode, 1, MPI_INT, MPI_MAX, grid->comm);
1080: if ((allMinNode >= startNode) && (allMinNode < endNode)) {
1081: *node = allMinNode - startNode;
1082: } else {
1083: *node = -1;
1084: }
1085: if (allMinNode < 0)
1086: PetscFunctionReturn(1);
1087: return(0);
1088: }
1090: /*@
1091: GridGetBoundarySize - Retrieves the size of the specified boundary for
1092: a given field.
1094: Not collective
1096: Input Parameters:
1097: + grid - The grid
1098: . bd - The boundary marker
1099: - field - The field
1101: Output Parameter:
1102: . size - The size of the boundary
1104: Level: intermediate
1106: .keywords: grid, boundary, size
1107: .seealso: GridGetBoundaryNext(), GridGetBoundaryStart()
1108: @*/
1109: int GridGetBoundarySize(Grid grid, int bd, int field, int *size)
1110: {
1111: int bdIndex;
1117: GridValidField(grid, field);
 1118:   if (grid->setupcalled == PETSC_FALSE) SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Grid must be setup first");
 1119:   MeshGetBoundaryIndex(grid->mesh, bd, &bdIndex);
1120: *size = grid->bdSize[bdIndex][field];
1121: return(0);
1122: }
1124: /*@
1125: GridGetBoundaryStart - Retrieves the canonical node number of the first node
1126: on the specified boundary in a class contained in a given field.
1128: Not collective
1130: Input Parameters:
1131: + grid - The grid
1132: . bd - The boundary marker
1133: . field - The field
1134: - ghost - Flag for including ghost nodes
1136: Output Parameters:
1137: + node - The canonical node number
1138: - nclass - The node class
1140: Level: intermediate
1142: .keywords: grid, boundary, start
1143: .seealso: GridGetBoundaryNext(), MeshGetBoundaryStart
1144: @*/
1145: int GridGetBoundaryStart(Grid grid, int bd, int field, PetscTruth ghost, int *node, int *nclass)
1146: {
1147: int f;
1154: if (grid->setupcalled == PETSC_FALSE) SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Grid must be setup first");
1155: /* Look for field in default class map */
1156: for(f = 0; f < grid->cm->numFields; f++) {
1157: if (grid->cm->fields[f] == field) break;
1158: }
1159: if (f == grid->cm->numFields) {
1160: SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE, "Field %d not present in the default class map", field);
1161: }
1162: (*grid->ops->getboundarystart)(grid, bd, f, ghost, grid->cm, node, nclass);
1163: return(0);
1164: }
1166: /*@
1167: GridGetBoundaryNext - Retrieves the canonical node number of the next node
1168: on the specified boundary in a class contained in a given field, with -1
1169: indicating boundary termination.
1171: Not collective
1173: Input Parameters:
1174: + grid - The grid
1175: . bd - The boundary marker
1176: . field - The field
1177: - ghost - Flag for including ghost nodes
1179: Output Parameters:
1180: + node - The canonical node number
1181: - nclass - The node class
1183: Level: intermediate
1185: .keywords: grid, boundary, next
1186: .seealso: GridGetBoundaryStart(), MeshGetBoundaryNext
1187: @*/
1188: int GridGetBoundaryNext(Grid grid, int bd, int field, PetscTruth ghost, int *node, int *nclass)
1189: {
1190: int f;
1197: if (grid->setupcalled == PETSC_FALSE) SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Grid must be setup first");
1198: /* Look for field in default class map */
1199: for(f = 0; f < grid->cm->numFields; f++) {
1200: if (grid->cm->fields[f] == field) break;
1201: }
1202: if (f == grid->cm->numFields) {
1203: SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE, "Field %d not present in the default class map", field);
1204: }
1205: (*grid->ops->getboundarynext)(grid, bd, field, ghost, grid->cm, node, nclass);
1206: return(0);
1207: }
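/*
   The traversal pattern intended for the two routines above, sketched for one
   boundary and one field: GridGetBoundaryStart() returns the first node (or -1 if
   the boundary is empty on this process), and GridGetBoundaryNext() is called until
   it returns -1.  The boundary marker 1 and field 0 are illustrative values.
*/
int GridBoundaryLoopExample(Grid grid)
{
  int node, nclass;

  GridGetBoundaryStart(grid, 1, 0, PETSC_FALSE, &node, &nclass);
  while (node >= 0) {
    /* ... use the canonical node number and its class here ... */
    GridGetBoundaryNext(grid, 1, 0, PETSC_FALSE, &node, &nclass);
  }
  return(0);
}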
1209: /*@
1210: GridRefineMesh - Refine a grid based on area constraints.
1212: Collective on Grid
1214: Input Parameters:
1215: + grid - The initial grid
1216: - area - A function which gives an area constraint when evaluated inside an element
1218: Output Parameter:
1219: . newgrid - The refined grid
1221: Level: advanced
1223: .keywords: grid, refinement
1224: .seealso: MeshRefine(), GridReformMesh()
1225: @*/
1226: int GridRefineMesh(Grid grid, PointFunction area, Grid *newgrid)
1227: {
1228: Mesh newMesh;
1229: int field;
1230: int ierr;
1235: MeshRefine(grid->mesh, area, &newMesh);
1236: GridCreate(newMesh, newgrid);
1237: MeshDestroy(newMesh);
1238: for(field = 0; field < grid->numFields; field++) {
1239: GridAddField(*newgrid, grid->fields[field].name, grid->fields[field].discType, grid->fields[field].numComp, 0);
1240: }
1241: GridSetType(*newgrid, grid->type_name);
1242: /* Copy registered operators in discretizations */
1243: /* Set parent: (*newgrid)->gridparent = grid; */
1245: return(0);
1246: }
1248: /*------------------------------------------ Distributed Data Functions ---------------------------------------------*/
1249: /*@
1250: GridGlobalToLocalGeneral - Scatters values including ghost variables
1251: from the given global vector into the given ghost vector.
1253: Collective on Grid
1255: Input Parameters:
1256: + grid - The grid
1257: . vec - The global vector
1258: . mode - The insertion mode, either INSERT_VALUES or ADD_VALUES
1259: - scatter - The scatter
1261: Output Parameters:
1262: . ghostVec - The local vector
1264: Level: intermediate
1266: .keywords: grid, scatter, global, local
1267: .seealso: GridGlobalToLocal(), GridLocalToGlobal(), GVecGlobalToLocal()
1268: @*/
1269: int GridGlobalToLocalGeneral(Grid grid, GVec vec, Vec ghostVec, InsertMode mode, VecScatter scatter)
1270: {
1274: if (mode == ADD_VALUES) SETERRQ(PETSC_ERR_SUP, "No support for adding ghost values");
1275: VecScatterBegin(vec, ghostVec, mode, SCATTER_FORWARD, scatter);
1276: VecScatterEnd(vec, ghostVec, mode, SCATTER_FORWARD, scatter);
1277: return(0);
1278: }
1280: /*@
1281: GridGlobalToLocal - Scatters values including ghost variables
1282: from the given global vector into the local grid ghost vector.
1284: Collective on Grid
1286: Input Parameters:
1287: + grid - The grid
1288: . mode - The insertion mode, either INSERT_VALUES or ADD_VALUES
1289: - vec - The grid vector
1291: Level: intermediate
1293: .keywords: grid, scatter, global, local
1294: .seealso: GridGlobalToLocalGeneral(), GridLocalToGlobal(), GVecGlobalToLocal()
1295: @*/
1296: int GridGlobalToLocal(Grid grid, InsertMode mode, GVec vec)
1297: {
1298: Grid g;
1299: int ierr;
1305: GVecGetGrid(vec, &g);
1306: if (grid != g) SETERRQ(PETSC_ERR_ARG_INCOMP, "Vector does not arise from this grid");
1307: GridGlobalToLocalGeneral(grid, vec, grid->ghostVec, mode, grid->ghostScatter);
1308: return(0);
1309: }
1311: /*@
1312: GridLocalToGlobal - Scatters local processor values including ghost variables
1313: from the local grid ghost vector into the given global vector.
1315: Collective on Grid
1317: Input Parameters:
1318: + grid - The grid
 1319: . mode - The insertion mode, either INSERT_VALUES or ADD_VALUES
1320: - vec - The grid vector
1322: Level: intermediate
1324: .seealso: GridGlobalToLocal(), GVecLocalToGlobal()
1325: @*/
1326: int GridLocalToGlobal(Grid grid, InsertMode mode, GVec vec)
1327: {
1328: Grid g;
1329: int ierr;
1335: GVecGetGrid(vec, &g);
1336: if (grid != g) SETERRQ(PETSC_ERR_ARG_WRONG, "Vector does not arise from this grid");
1337: VecScatterBegin(grid->ghostVec, vec, mode, SCATTER_REVERSE, grid->ghostScatter);
1338: VecScatterEnd(grid->ghostVec, vec, mode, SCATTER_REVERSE, grid->ghostScatter);
1339: return(0);
1340: }
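/*
   A sketch of the usual ghost-exchange cycle built from the two routines above:
   scatter a global grid vector into the grid's ghosted local storage, work on the
   local (ghosted) data, and accumulate local contributions back into a global
   vector.  The vector x is assumed to have been created on this grid.
*/
int GridGhostUpdateExample(Grid grid, GVec x)
{
  /* Fill the local ghost vector from the global vector */
  GridGlobalToLocal(grid, INSERT_VALUES, x);
  /* ... element or node loops using the ghosted data go here ... */
  /* Accumulate local (including ghost) contributions back into x */
  GridLocalToGlobal(grid, ADD_VALUES, x);
  return(0);
}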
1342: /*---------------------------------------------- InterGrid Functions ------------------------------------------------*/
1343: /*@
1344: GridInterpolateElementVec - Interpolates a given element vector from one discretization to another.
1346: Not collective
1348: Input Parameters:
1349: + grid - The original grid
1350: . field - The original field
1351: . vec - The original element vector
1352: . newGrid - The grid defining the new element vector
1353: - newField - The new field
1355: Output Parameter:
1356: . newVec - The interpolated element vector
1358: Level: advanced
1360: .keywords: grid, interpolation
1361: .seealso: ElementVecCreate()
1362: @*/
1363: int GridInterpolateElementVec(Grid grid, int field, ElementVec vec, Grid newGrid, int newField, ElementVec newVec)
1364: {
1372: GridValidField(grid, field);
1373: GridValidField(newGrid, newField);
1374: if (grid->fields[field].numComp != newGrid->fields[newField].numComp) {
1375: SETERRQ2(PETSC_ERR_ARG_INCOMP, "Fields have a different number of components %d != %d",
1376: grid->fields[field].numComp, newGrid->fields[newField].numComp);
1377: }
1378: DiscretizationInterpolateElementVec(grid->fields[field].disc, vec, newGrid->fields[newField].disc, newVec);
1379: return(0);
1380: }
1382: /*@
1383: GridCreateRestriction - Generates restriction matrix between two grids.
1385: Input Parameters:
1386: + vf - The fine grid
1387: - vc - The coarse grid
1389: Output Parameter:
1390: . gmat - A sparse matrix containing restriction
1392: Level: advanced
1394: @*/
1395: int GridCreateRestriction(Grid vf, Grid vc, GMat *gmat)
1396: {
1404: if (vf->setupcalled == PETSC_FALSE) {
1405: GridSetUp(vf);
1406: }
1407: if (vc->setupcalled == PETSC_FALSE) {
1408: GridSetUp(vc);
1409: }
1410: if (!vf->ops->gridcreaterestriction) SETERRQ(PETSC_ERR_SUP, " ");
1411: (*vf->ops->gridcreaterestriction)(vf, vc, gmat);
1412: return(0);
1413: }
1415: /*---------------------------------------------- Assembly Functions -------------------------------------------------*/
1416: /*@
1417: GridLocalToElementGeneral - Scatters values including ghost variables from the given ghost vector
1418: into the given element vector.
1420: Not collective
1422: Input Parameters:
1423: + grid - The grid
 1424: . ghostVec      - The vector of values (including ghost points)
1425: . reduceVec - The vector of boundary values
1426: . reduceSystem - The flag for reducing boundary conditions
1427: . reduceElement - The flag for putting boundary values in the element vector
1428: - vec - The element vector
1430: WARNING:
1431: Make sure that the indices in the element vector are local indices.
1433: Note:
 1434:   If reduceSystem and reduceElement are PETSC_TRUE, then boundary values are
1435: placed in vec. If reduceElement is PETSC_FALSE, then zero is used instead.
1437: Level: advanced
1439: .keywords: grid, element, scatter
1440: .seealso: GridLocalToElement(), GridGlobalToLocal()
1441: @*/
1442: int GridLocalToElementGeneral(Grid grid, Vec ghostVec, Vec reduceVec, PetscTruth reduceSystem, PetscTruth reduceElement, ElementVec vec)
1443: {
1444: PetscScalar *array, *reduceArray;
1445: PetscScalar alpha = -grid->reduceAlpha;
1446: int elemSize = vec->reduceSize;
1447: int numOverlapVars, numReduceOverlapVars;
1448: int var;
1449: int ierr;
1452: VecGetSize(ghostVec, &numOverlapVars);
1453: VecGetArray(ghostVec, &array);
1454: if (reduceSystem == PETSC_TRUE) {
1455: VecGetSize(reduceVec, &numReduceOverlapVars);
1456: VecGetArray(reduceVec, &reduceArray);
1457: for(var = 0; var < elemSize; var++) {
1458: #ifdef PETSC_USE_BOPT_g
1459: if (vec->indices[var] >= numOverlapVars) {
1460: SETERRQ2(PETSC_ERR_ARG_CORRUPT, "Invalid index %d into ghost vector should be in [0,%d)",
1461: vec->indices[var], numOverlapVars);
1462: }
1463: #endif
1464: if (vec->indices[var] < 0) {
1465: if (reduceElement == PETSC_TRUE) {
1466: #ifdef PETSC_USE_BOPT_g
1467: if (-(vec->indices[var]+1) >= numReduceOverlapVars) {
 1468:           SETERRQ2(PETSC_ERR_ARG_CORRUPT, "Invalid index %d into reduce vector should be in [0,%d)",
1469: -(vec->indices[var]+1), numReduceOverlapVars);
1470: }
1471: #endif
1472: vec->array[var] = alpha*reduceArray[-(vec->indices[var]+1)];
1473: } else {
1474: vec->array[var] = 0.0;
1475: }
1476: } else {
1477: vec->array[var] = array[vec->indices[var]];
1478: }
1479: }
1480: VecRestoreArray(reduceVec, &reduceArray);
1481: } else {
1482: for(var = 0; var < elemSize; var++) {
1483: #ifdef PETSC_USE_BOPT_g
1484: if ((vec->indices[var] < 0) || (vec->indices[var] >= numOverlapVars)) {
1485: SETERRQ2(PETSC_ERR_ARG_CORRUPT, "Invalid index %d into ghost vector should be in [0,%d)",
1486: vec->indices[var], numOverlapVars);
1487: }
1488: #endif
1489: vec->array[var] = array[vec->indices[var]];
1490: }
1491: }
1492: VecRestoreArray(ghostVec, &array);
1493: return(0);
1494: }
1496: /*@
1497: GridLocalToElement - Scatters values including ghost variables from the local grid ghost vector
1498: into the given element vector.
1500: Not collective
1502: Input Parameters:
1503: + grid - The grid
1504: - vec - The element vector
1506: WARNING:
1507: Make sure that the indices in the element vector are local indices.
1509: Level: advanced
1511: .keywords: grid, element, scatter
1512: .seealso: GridLocalToElementGeneral(), GridGlobalToLocal()
1513: @*/
1514: int GridLocalToElement(Grid grid, ElementVec vec)
1515: {
1521: GridLocalToElementGeneral(grid, grid->ghostVec, grid->bdReduceVecCur, grid->reduceSystem, grid->reduceElement, vec);
1522: return(0);
1523: }
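/*
   A sketch of how GridLocalToElement() fits into an element loop: after
   GridGlobalToLocal() has filled the grid's ghost vector, each element gathers its
   values into an element vector whose indices have already been set to local
   indices.  The element vector elemVec and its index setup are assumed to come from
   elsewhere (e.g. the discretization and ordering routines).
*/
int GridElementGatherExample(Grid grid, GVec x, ElementVec elemVec)
{
  GridGlobalToLocal(grid, INSERT_VALUES, x);
  /* elemVec->indices[] must already contain local indices for the current element */
  GridLocalToElement(grid, elemVec);
  /* ... evaluate the weak form using elemVec->array here ... */
  return(0);
}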
1525: /*-------------------------------------------- Evaluation Functions -------------------------------------------------*/
1526: /*@C
1527: GridEvaluateRhs - This function constructs the weak form of the functions and
1528: operators specified with GridSetRhsFunction(), GridSetRhsOperator(),
1529: and GridSetRhsNonlinearOperator().
1531: Collective on Grid
1533: Input Parameters:
1534: + grid - The grid
1535: . x - The current iterate
1536: - ctx - The optional user context
 1538:   Output Parameter:
1539: . f - The function value
1541: Level: beginner
1543: .keywords: grid, rhs
1544: .seealso: GridEvaluateSystemMatrix(), GridSetRhsFunction(), GridSetRhsOperator(), GridSetRhsNonlinearOperator()
1545: @*/
1546: int GridEvaluateRhs(Grid grid, GVec x, GVec f, PetscObject ctx)
1547: {
1548: Mesh mesh;
1549: MeshMover mover;
1550: Grid meshVelGrid;
1551: PetscScalar alpha = -grid->reduceAlpha;
1552: int ierr;
1556: /* x can be PETSC_NULL */
1558: /* Setup mesh velocity calculation for ALE variables */
1559: if (grid->ALEActive == PETSC_TRUE) {
1560: GridGetMesh(grid, &mesh);
1561: MeshGetMover(mesh, &mover);
1562: MeshMoverGetVelocityGrid(mover, &meshVelGrid);
1563: GridSetUp(meshVelGrid);
1564: }
1565: /* Calculate Rhs */
1566: (*grid->ops->gridevaluaterhs)(grid, x, f, ctx);
1567: /* Add boundary values */
1568: if ((grid->reduceSystem == PETSC_TRUE) && (grid->reduceElement == PETSC_FALSE) && (grid->bdReduceMat != PETSC_NULL)) {
1569: /* Calculate contribution of constrained variables to the Rhs */
1570: MatMult(grid->bdReduceMat, grid->bdReduceVecCur, grid->reduceVec);
1571: /* Update Rhs */
1572: VecAXPY(&alpha, grid->reduceVec, f);
1573: }
1574: return(0);
1575: }
 1577: /*@C
 1578:   GridEvaluateRhsFunction - This function constructs the weak form of the functions specified with GridSetRhsFunction().
1580: Collective on Grid
1582: Input Parameters:
1583: + grid - The grid
1584: . x - The current iterate
1585: - ctx - The optional user context
1587: Output Parameter:
1588: . f - The function value
1590: Level: beginner
 1592: .keywords: grid, rhs, function
 1593: .seealso: GridEvaluateRhsOperator(), GridEvaluateRhs(), GridEvaluateSystemMatrix()
1594: @*/
1595: int GridEvaluateRhsFunction(Grid grid, GVec x, GVec f, void *ctx)
1596: {
1597: PetscTruth oldLinear = grid->activeOpTypes[1];
1598: PetscTruth oldNonlinear = grid->activeOpTypes[2];
1599: int ierr;
1603: /* x can be PETSC_NULL */
1605: /* Turn off computation of operators */
1606: grid->activeOpTypes[1] = PETSC_FALSE;
1607: grid->activeOpTypes[2] = PETSC_FALSE;
1608: GridEvaluateRhs(grid, x, f, (PetscObject) ctx);
1609: grid->activeOpTypes[1] = oldLinear;
1610: grid->activeOpTypes[2] = oldNonlinear;
1611: return(0);
1612: }
 1614: /*@C
 1615:   GridEvaluateRhsOperator - This function constructs the weak form of the operators specified with GridSetRhsOperator().
1617: Collective on Grid
1619: Input Parameters:
1620: + grid - The grid
1621: . x - The current iterate
1622: . linear - The flag for including linear operators
1623: . nonlinear - The flag for including nonlinear operators
1624: - ctx - The optional user context
1626: Output Parameter:
1627: . f - The operator value
1629: Level: beginner
 1631: .keywords: grid, rhs, operator
 1632: .seealso: GridEvaluateRhsFunction(), GridEvaluateRhs(), GridEvaluateSystemMatrix()
1633: @*/
1634: int GridEvaluateRhsOperator(Grid grid, GVec x, GVec f, PetscTruth linear, PetscTruth nonlinear, void *ctx)
1635: {
1636: PetscTruth oldFunction = grid->activeOpTypes[0];
1637: PetscTruth oldLinear = grid->activeOpTypes[1];
1638: PetscTruth oldNonlinear = grid->activeOpTypes[2];
1639: int ierr;
1643: /* x can be PETSC_NULL */
1645: /* Turn off computation of operators */
1646: grid->activeOpTypes[0] = PETSC_FALSE;
1647: grid->activeOpTypes[1] = linear;
1648: grid->activeOpTypes[2] = nonlinear;
1649: GridEvaluateRhs(grid, x, f, (PetscObject) ctx);
1650: grid->activeOpTypes[0] = oldFunction;
1651: grid->activeOpTypes[1] = oldLinear;
1652: grid->activeOpTypes[2] = oldNonlinear;
1653: return(0);
1654: }
 1656: /*@C
 1657:   GridEvaluateSystemMatrix - This function constructs the weak form of the linear operators
 1658:   specified with GridSetMatOperator().
1660: Collective on Grid
1662: Input Parameters:
1663: + grid - The grid
1664: . x - The current iterate
1665: - ctx - The optional user context
1667: Output Parameter:
1668: . f - The function value
1670: Level: beginner
1672: .keywords: grid, matrix
1673: .seealso: GridEvaluateRhs()
1674: @*/
1675: int GridEvaluateSystemMatrix(Grid grid, GVec x, GMat *J, GMat *M, MatStructure *flag, PetscObject ctx)
1676: {
1677: Mesh mesh;
1678: MeshMover mover;
1679: Grid meshVelGrid;
1680: #ifdef PETSC_HAVE_MATHEMATICA
1681: PetscTruth opt;
1682: #endif
1683: int ierr;
1687: if (grid->ALEActive == PETSC_TRUE) {
1688: GridGetMesh(grid, &mesh);
1689: MeshGetMover(mesh, &mover);
1690: MeshMoverGetVelocityGrid(mover, &meshVelGrid);
1691: GridSetUp(meshVelGrid);
1692: }
1693: if (grid->isMatrixFree == PETSC_TRUE) {
1694: /* Should probably set this as the matrix context, but there is no MatShellSetContext() */
1695: grid->matrixFreeArg = x;
1696: grid->matrixFreeContext = ctx;
1697: } else {
1698: (*grid->ops->gridevaluatesystemmatrix)(grid, x, J, M, flag, ctx);
1699: }
1700: #ifdef PETSC_HAVE_MATHEMATICA
1701: /* Debugging */
1702: PetscOptionsHasName(PETSC_NULL, "-trace_grid_math", &opt);
1703: if (opt == PETSC_TRUE) {
1704: ViewerMathematicaSetName(PETSC_VIEWER_MATHEMATICA_WORLD, "jac");
1705: MatView(*J, PETSC_VIEWER_MATHEMATICA_WORLD);
1706: ViewerMathematicaClearName(PETSC_VIEWER_MATHEMATICA_WORLD);
1707: }
1708: #endif
1709: return(0);
1710: }
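/*
   A sketch of forming a linear system with the two evaluation routines above: the
   right-hand side is assembled into a grid vector and the system matrix into a grid
   matrix.  Creation of the matrix A and the vectors x and f is not shown and is
   assumed to have happened elsewhere; passing the same matrix for J and M is just
   one common choice.
*/
int GridFormSystemExample(Grid grid, GVec x, GVec f, GMat A, PetscObject ctx)
{
  MatStructure flag;

  GridEvaluateRhs(grid, x, f, ctx);
  GridEvaluateSystemMatrix(grid, x, &A, &A, &flag, ctx);
  return(0);
}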
1712: #if 0
1713: /*----------------------------------------- Parallel Communication Functions ----------------------------------------*/
1714: /*@C
1715: GridGhostExchange - This functions transfers data between local and ghost storage without a predefined mapping.
1717: Collective on MPI_Comm
1719: Input Parameters:
1720: + comm - The communicator
1721: . numGhosts - The number of ghosts in this domain
1722: . ghostProcs - The processor from which to obtain each ghost
1723: . ghostIndices - The global index for each ghost
1724: . dataType - The type of the variables
1725: . firstVar - The first variable on each processor
1726: . addv - The insert mode, INSERT_VALUES or ADD_VALUES
1727: - mode - The direction of the transfer, SCATTER_FORWARD or SCATTER_REVERSE
 1729:   Output Parameters:
1730: + locVars - The local variable array
1731: - ghostVars - The ghost variables
1733: Note:
1734: The data in ghostVars is assumed contiguous and implicitly indexed by the order of
1735: ghostProcs and ghostIndices. The SCATTER_FORWARD mode will take the requested data
1736: from locVars and copy it to ghostVars in the order specified by ghostIndices. The
1737: SCATTER_REVERSE mode will take data from ghostVars and copy it to locVars.
1739: Level: advanced
1741: .keywords: ghost, exchange, grid
1742: .seealso: GridGlobalToLocal(), GridLocalToGlobal()
1743: @*/
1744: int GridGhostExchange(MPI_Comm comm, int numGhosts, int *ghostProcs, int *ghostIndices, PetscDataType dataType,
1745: int *firstVar, InsertMode addv, ScatterMode mode, void *locVars, void *ghostVars)
1746: {
1747: int *numSendGhosts; /* The number of ghosts from each domain */
1748: int *numRecvGhosts; /* The number of local variables which are ghosts in each domain */
1749: int *sumSendGhosts; /* The prefix sums of numSendGhosts */
1750: int *sumRecvGhosts; /* The prefix sums of numRecvGhosts */
1751: int *offsets; /* The offset into the send array for each domain */
1752: int totSendGhosts; /* The number of ghosts to request variables for */
1753: int totRecvGhosts; /* The number of nodes to provide class info about */
1754: int *sendIndices; /* The canonical indices of ghosts in this domain */
1755: int *recvIndices; /* The canonical indices of ghosts to return variables for */
1756: char *tempVars; /* The variables of the requested or submitted ghosts */
1757: char *locBytes = (char *) locVars;
1758: MPI_Datatype MPIType;
1759: int typeSize;
1760: #ifdef PETSC_USE_BOPT_g
1761: int numLocVars;
1762: #endif
1763: int numProcs, rank;
1764: int proc, ghost, locIndex, byte;
1765: int ierr;
1768: /* Initialize communication */
1769: MPI_Comm_size(comm, &numProcs);
1770: MPI_Comm_rank(comm, &rank);
1771: PetscMalloc(numProcs * sizeof(int), &numSendGhosts);
1772: PetscMalloc(numProcs * sizeof(int), &numRecvGhosts);
1773: PetscMalloc(numProcs * sizeof(int), &sumSendGhosts);
1774: PetscMalloc(numProcs * sizeof(int), &sumRecvGhosts);
1775: PetscMalloc(numProcs * sizeof(int), &offsets);
1776: PetscMemzero(numSendGhosts, numProcs * sizeof(int));
1777: PetscMemzero(numRecvGhosts, numProcs * sizeof(int));
1778: PetscMemzero(sumSendGhosts, numProcs * sizeof(int));
1779: PetscMemzero(sumRecvGhosts, numProcs * sizeof(int));
1780: PetscMemzero(offsets, numProcs * sizeof(int));
1781: #ifdef PETSC_USE_BOPT_g
1782: numLocVars = firstVar[rank+1] - firstVar[rank];
1783: #endif
1785: /* Get number of ghosts needed from each processor */
1786: for(ghost = 0; ghost < numGhosts; ghost++)
1787: numSendGhosts[ghostProcs[ghost]]++;
1789: /* Get number of ghosts to provide variables for */
1790: MPI_Alltoall(numSendGhosts, 1, MPI_INT, numRecvGhosts, 1, MPI_INT, comm);
1791: for(proc = 1; proc < numProcs; proc++) {
1792: sumSendGhosts[proc] = sumSendGhosts[proc-1] + numSendGhosts[proc-1];
1793: sumRecvGhosts[proc] = sumRecvGhosts[proc-1] + numRecvGhosts[proc-1];
1794: offsets[proc] = sumSendGhosts[proc];
1795: }
1796: totSendGhosts = sumSendGhosts[numProcs-1] + numSendGhosts[numProcs-1];
1797: totRecvGhosts = sumRecvGhosts[numProcs-1] + numRecvGhosts[numProcs-1];
1798: if (numGhosts != totSendGhosts) {
1799: SETERRQ2(PETSC_ERR_PLIB, "Invalid number of ghosts %d in send, should be %d", totSendGhosts, numGhosts);
1800: }
1802: PetscDataTypeGetSize(dataType, &typeSize);
1803: if (totSendGhosts) {
 1804:     PetscMalloc(totSendGhosts * sizeof(int), &sendIndices);
1805: }
1806: if (totRecvGhosts) {
 1807:     PetscMalloc(totRecvGhosts * sizeof(int), &recvIndices);
 1808:     PetscMalloc(totRecvGhosts * typeSize,    &tempVars);
1809: }
1811: /* Must order ghosts by processor */
1812: for(ghost = 0; ghost < numGhosts; ghost++)
1813: sendIndices[offsets[ghostProcs[ghost]]++] = ghostIndices[ghost];
1815: /* Get canonical indices of ghosts to provide variables for */
1816: MPI_Alltoallv(sendIndices, numSendGhosts, sumSendGhosts, MPI_INT,
1817: recvIndices, numRecvGhosts, sumRecvGhosts, MPI_INT, comm);
1818:
1820: switch(mode)
1821: {
1822: case SCATTER_FORWARD:
1823: /* Get ghost variables */
1824: if (addv == INSERT_VALUES) {
1825: for(ghost = 0; ghost < totRecvGhosts; ghost++)
1826: {
1827: locIndex = recvIndices[ghost] - firstVar[rank];
1828: #ifdef PETSC_USE_BOPT_g
1829: if ((locIndex < 0) || (locIndex >= numLocVars)) {
1830: SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE, "Invalid ghost index %d, not in [0,%d)", locIndex, numLocVars);
1831: }
1832: #endif
1833: for(byte = 0; byte < typeSize; byte++)
1834: tempVars[ghost*typeSize+byte] = locBytes[locIndex*typeSize+byte];
1835: }
1836: } else {
1837: for(ghost = 0; ghost < totRecvGhosts; ghost++)
1838: {
1839: locIndex = recvIndices[ghost] - firstVar[rank];
1840: #ifdef PETSC_USE_BOPT_g
1841: if ((locIndex < 0) || (locIndex >= numLocVars)) {
1842: SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE, "Invalid ghost index %d, not in [0,%d)", locIndex, numLocVars);
1843: }
1844: #endif
1845: for(byte = 0; byte < typeSize; byte++)
1846: tempVars[ghost*typeSize+byte] += locBytes[locIndex*typeSize+byte];
1847: }
1848: }
1850: /* Communicate local variables to ghost storage */
1851: PetscDataTypeToMPIDataType(dataType, &MPIType);
1852: MPI_Alltoallv(tempVars, numRecvGhosts, sumRecvGhosts, MPIType,
1853: ghostVars, numSendGhosts, sumSendGhosts, MPIType, comm);
1854:
1855: break;
1856: case SCATTER_REVERSE:
1857: /* Communicate ghost variables to local storage */
1858: PetscDataTypeToMPIDataType(dataType, &MPIType);
1859: MPI_Alltoallv(ghostVars, numSendGhosts, sumSendGhosts, MPIType,
1860: tempVars, numRecvGhosts, sumRecvGhosts, MPIType, comm);
1861:
1863: /* Get ghost variables */
1864: if (addv == INSERT_VALUES) {
1865: for(ghost = 0; ghost < totRecvGhosts; ghost++)
1866: {
1867: locIndex = recvIndices[ghost] - firstVar[rank];
1868: #ifdef PETSC_USE_BOPT_g
1869: if ((locIndex < 0) || (locIndex >= numLocVars)) {
1870: SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE, "Invalid ghost index %d, not in [0,%d)", locIndex, numLocVars);
1871: }
1872: #endif
1873: for(byte = 0; byte < typeSize; byte++)
1874: locBytes[locIndex*typeSize+byte] = tempVars[ghost*typeSize+byte];
1875: }
1876: } else {
1877: /* There must be a better way to do this -- Ask Bill */
1878: if (typeSize == sizeof(int)) {
1879: int *tempInt = (int *) tempVars;
1880: int *locInt = (int *) locVars;
1882: for(ghost = 0; ghost < totRecvGhosts; ghost++) {
1883: locIndex = recvIndices[ghost] - firstVar[rank];
1884: #ifdef PETSC_USE_BOPT_g
1885: if ((locIndex < 0) || (locIndex >= numLocVars)) {
1886: SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE, "Invalid ghost index %d, not in [0,%d)", locIndex, numLocVars);
1887: }
1888: #endif
1889: locInt[locIndex] += tempInt[ghost];
1890: }
1891: } else if (typeSize == sizeof(long int)) {
1892: long int *tempLongInt = (long int *) tempVars;
1893: long int *locLongInt = (long int *) locVars;
1895: for(ghost = 0; ghost < totRecvGhosts; ghost++) {
1896: locIndex = recvIndices[ghost] - firstVar[rank];
1897: #ifdef PETSC_USE_BOPT_g
1898: if ((locIndex < 0) || (locIndex >= numLocVars)) {
1899: SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE, "Invalid ghost index %d, not in [0,%d)", locIndex, numLocVars);
1900: }
1901: #endif
1902: locLongInt[locIndex] += tempLongInt[ghost];
1903: }
1904: } else {
1905: for(ghost = 0; ghost < totRecvGhosts; ghost++) {
1906: locIndex = recvIndices[ghost] - firstVar[rank];
1907: #ifdef PETSC_USE_BOPT_g
1908: if ((locIndex < 0) || (locIndex >= numLocVars)) {
1909: SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE, "Invalid ghost index %d, not in [0,%d)", locIndex, numLocVars);
1910: }
1911: #endif
1912: for(byte = 0; byte < typeSize; byte++)
1913: locBytes[locIndex*typeSize+byte] += tempVars[ghost*typeSize+byte];
1914: }
1915: }
1916: }
1917: break;
1918: default:
1919: SETERRQ1(PETSC_ERR_ARG_WRONG, "Invalid scatter mode %d", mode);
1920: }
1922: /* Cleanup */
1923: PetscFree(numSendGhosts);
1924: PetscFree(numRecvGhosts);
1925: PetscFree(sumSendGhosts);
1926: PetscFree(sumRecvGhosts);
1927: PetscFree(offsets);
1928: if (totSendGhosts) {
1929: PetscFree(sendIndices);
1930: }
1931: if (totRecvGhosts) {
1932: PetscFree(recvIndices);
1933: PetscFree(tempVars);
1934: }
1935: return(0);
1936: }
1937: #endif