Actual source code: ex1.c
static char help[] = "Landau collision operator driver\n\n";

#include <petscts.h>
#include <petsclandau.h>
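
/* User context for the DMPlexLandauAccess() callback below: it selects which
   (grid, batch, field) entry the callback modifies */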
typedef struct {
  PetscInt  grid_target, batch_target, field_target;
  PetscBool init;
} ex1Ctx;

/*
  Callback for DMPlexLandauAccess():

  Input Parameters:
+ dm          - a DM for this field
. local_field - the local index in the grid for this field
. grid        - the grid index
. b_id        - the batch index
- vctx        - a user context

  Input/Output Parameter:
. x - the Vec to write this field's data to
*/
PetscErrorCode landau_field_print_access_callback(DM dm, Vec x, PetscInt local_field, PetscInt grid, PetscInt b_id, void *vctx)
{
  ex1Ctx    *user = (ex1Ctx *)vctx;
  LandauCtx *ctx;

  PetscFunctionBegin;
  PetscCall(DMGetApplicationContext(dm, &ctx));
  if (grid == user->grid_target && b_id == user->batch_target && local_field == user->field_target) {
    PetscScalar one = 1.0e10;

    PetscCall(VecSet(x, one)); // overwrite the target field with a constant
    if (!user->init) {
      PetscCall(PetscObjectSetName((PetscObject)dm, "single"));
      PetscCall(DMViewFromOptions(dm, NULL, "-ex1_dm_view")); // DMCreateSubDM does not seem to give the DM's field the name from the original DM
      user->init = PETSC_TRUE;
    }
    PetscCall(PetscObjectSetName((PetscObject)x, "u")); // give the vector a nicer name; DMCreateSubDM could do this for us and get the correct name
    PetscCall(VecViewFromOptions(x, NULL, "-ex1_vec_view")); // this causes diffs with Kokkos, etc.
    PetscCall(PetscInfo(dm, "DMPlexLandauAccess user 'add' method to grid %" PetscInt_FMT ", batch %" PetscInt_FMT " and species %" PetscInt_FMT "\n", grid, b_id, ctx->species_offset[grid] + local_field));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
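
/*
  Driver: create the Landau velocity-space DM (the "pack" of grids), take one
  implicit time step with TS, print norms before and after, then exercise
  DMPlexLandauAccess() on the change X - X_0
*/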
int main(int argc, char **argv)
{
  DM             pack;
  Vec            X, X_0;
  PetscInt       dim = 2;
  TS             ts;
  Mat            J;
  SNES           snes;
  KSP            ksp;
  PC             pc;
  SNESLineSearch linesearch;
  PetscReal      time;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-dim", &dim, NULL));
  /* Create the velocity-space mesh; this also creates the solution vector X and the Jacobian matrix J */
  PetscCall(DMPlexLandauCreateVelocitySpace(PETSC_COMM_SELF, dim, "", &X, &J, &pack));
  PetscCall(DMSetUp(pack));
  PetscCall(VecDuplicate(X, &X_0));
  PetscCall(VecCopy(X, X_0));
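  /* X_0 keeps the initial state; the change over the solve, X - X_0, is formed after TSSolve() */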
  PetscCall(DMPlexLandauPrintNorms(X, 0));
  PetscCall(DMSetOutputSequenceNumber(pack, 0, 0.0));
  /* Create timestepping solver context */
  PetscCall(TSCreate(PETSC_COMM_SELF, &ts));
  PetscCall(TSSetDM(ts, pack));
  PetscCall(TSGetSNES(ts, &snes));
  PetscCall(SNESGetLineSearch(snes, &linesearch));
  PetscCall(SNESLineSearchSetType(linesearch, SNESLINESEARCHBASIC));
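  /* the basic line search applies full Newton steps with no backtracking */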
  PetscCall(TSSetIFunction(ts, NULL, DMPlexLandauIFunction, NULL));
  PetscCall(TSSetIJacobian(ts, J, J, DMPlexLandauIJacobian, NULL));
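  /* DMPlexLandauIFunction/DMPlexLandauIJacobian supply the collision operator's implicit residual and Jacobian to TS */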
  PetscCall(TSSetExactFinalTime(ts, TS_EXACTFINALTIME_STEPOVER));
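  /* STEPOVER lets the final step run past -ts_max_time rather than shortening dt to land on it exactly */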
  PetscCall(SNESGetKSP(snes, &ksp));
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(TSSetFromOptions(ts));
  PetscCall(TSSetSolution(ts, X));
  PetscCall(TSSolve(ts, X));
  PetscCall(DMPlexLandauPrintNorms(X, 1));
  PetscCall(TSGetTime(ts, &time));
  PetscCall(DMSetOutputSequenceNumber(pack, 1, time));
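  /* record the post-solve state as output sequence point 1 at the final solve time */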
  PetscCall(VecAXPY(X, -1, X_0)); // X <- X - X_0, the change in the solution over the solve
  { /* test the DMPlexLandauAccess() add-field method */
    ex1Ctx    *user;
    LandauCtx *ctx;

    PetscCall(DMGetApplicationContext(pack, &ctx));
    PetscCall(PetscNew(&user)); // PetscNew zeroes the struct, so batch_target = 0 and init = PETSC_FALSE
    user->grid_target  = 1; // 2nd ion species (grid 1 with -dm_landau_num_species_grid 1,2)
    user->field_target = 1;
    PetscCall(DMPlexLandauAccess(pack, X, landau_field_print_access_callback, user));
    PetscCall(PetscFree(user));
  }
  /* clean up */
  PetscCall(DMPlexLandauDestroyVelocitySpace(&pack));
  PetscCall(TSDestroy(&ts));
  PetscCall(VecDestroy(&X));
  PetscCall(VecDestroy(&X_0));
  PetscCall(PetscFinalize());
  return 0;
}
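
/*
  A minimal sketch of a run, assuming a PETSc build that meets the requirements
  listed in the TEST block below (the full, verified option sets are given there):

    ./ex1 -dim 2 -ts_max_steps 1 -ts_monitor
*/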

/*TEST

  testset:
    requires: p4est !complex double defined(PETSC_USE_DMLANDAU_2D)
    output_file: output/ex1_0.out
    filter: grep -v "% type: seq"
    args: -dm_landau_num_species_grid 1,2 -petscspace_degree 3 -petscspace_poly_tensor 1 -dm_landau_type p4est -dm_landau_ion_masses 2,4 -dm_landau_ion_charges 1,18 -dm_landau_thermal_temps 5,5,.5 -dm_landau_n 1.00018,1,1e-5 -dm_landau_n_0 1e20 -ts_monitor -snes_rtol 1.e-14 -snes_stol 1.e-14 -snes_monitor -snes_converged_reason -ts_type arkimex -ts_arkimex_type 1bee -ts_max_snes_failures -1 -ts_rtol 1e-1 -ts_dt 1.e-1 -ts_max_time 1 -ts_adapt_clip .5,1.25 -ts_adapt_scale_solve_failed 0.75 -ts_adapt_time_step_increase_delay 5 -ts_max_steps 1 -pc_type lu -ksp_type preonly -dm_landau_amr_levels_max 2,1 -ex1_dm_view -ex1_vec_view ::ascii_matlab -dm_landau_num_cells 2,2 -dm_landau_domain_max_par 6,6 -dm_landau_domain_max_perp 4,4
    test:
      suffix: cpu
      args: -dm_landau_device_type cpu
    test:
      suffix: kokkos
      requires: kokkos_kernels !defined(PETSC_HAVE_CUDA_CLANG)
      args: -dm_landau_device_type kokkos -dm_mat_type aijkokkos -dm_vec_type kokkos
    test:
      suffix: cuda
      requires: cuda !defined(PETSC_HAVE_CUDA_CLANG)
      args: -dm_landau_device_type cuda -dm_mat_type aijcusparse -dm_vec_type cuda -mat_cusparse_use_cpu_solve

TEST*/