/projects/FASTMath/ATPESC-2015/examples/petsc
export PETSC_DIR=/projects/FASTMath/ATPESC-2015/install/fm-2015/petsc/3.6.1.1/powerpc64-bgq-linux-gcc-4.4/
qsub -I -n 32 -t 30 -q ATPESC2015 -A ATPESC2015
PETSc comes with a large number of example codes to illustrate usage. The three main sets are in:
WHAT THIS EXAMPLE DEMONSTRATES:
FURTHER DETAILS:
DO THE FOLLOWING:
cd petsc/src/ksp/ksp/examples/tutorials
make ex50
mpiexec -n 1 ./ex50 -da_grid_x 4 -da_grid_y 4 -mat_view
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 1 ./ex50 -da_grid_x 4 -da_grid_y 4 -mat_view
runjob --block $COBALT_PARTNAME -p 16 --np 1 : ./ex50 -da_grid_x 4 -da_grid_y 4 -mat_view
mpiexec -n 16 ./ex50 -da_grid_x 120 -da_grid_y 120 -pc_type lu -pc_factor_mat_solver_package superlu_dist -ksp_monitor -ksp_view
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 16 ./ex50 -da_grid_x 120 -da_grid_y 120 -pc_type lu -pc_factor_mat_solver_package superlu_dist -ksp_monitor -ksp_view
runjob --block $COBALT_PARTNAME -p 16 --np 16 : ./ex50 -da_grid_x 120 -da_grid_y 120 -pc_type lu -pc_factor_mat_solver_package superlu_dist -ksp_monitor -ksp_view
mpiexec -n 16 ./ex50 -da_grid_x 2049 -da_grid_y 2049 -pc_type mg -pc_mg_levels 10 -ksp_monitor
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 16 ./ex50 -da_grid_x 2049 -da_grid_y 2049 -pc_type mg -pc_mg_levels 10 -ksp_monitor
runjob --block $COBALT_PARTNAME -p 16 --np 16 : ./ex50 -da_grid_x 2049 -da_grid_y 2049 -pc_type mg -pc_mg_levels 10 -ksp_monitor
WHAT THIS EXAMPLE DEMONSTRATES:
FURTHER DETAILS:
DO THE FOLLOWING:
cd petsc/src/ts/examples/tutorials
make ex2
mpiexec -n 1 ./ex2 -ts_max_steps 10 -ts_monitor
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 1 ./ex2 -ts_max_steps 10 -ts_monitor
runjob --block $COBALT_PARTNAME -p 16 --np 1 : ./ex2 -ts_max_steps 10 -ts_monitor
mpiexec -n 4 ./ex2 -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 4 ./ex2 -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor
runjob --block $COBALT_PARTNAME -p 16 --np 4 : ./ex2 -ts_max_steps 10 -ts_monitor -snes_monitor -ksp_monitor
mpiexec -n 16 ./ex2 -ts_max_steps 10 -ts_monitor -M 128
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 16 ./ex2 -ts_max_steps 10 -ts_monitor -M 128
runjob --block $COBALT_PARTNAME -p 16 --np 16 : ./ex2 -ts_max_steps 10 -ts_monitor -M 128
WHAT THIS EXAMPLE DEMONSTRATES:
FURTHER DETAILS:
DO THE FOLLOWING:
cd petsc/src/snes/examples/tutorials/
make ex19
mpiexec -n 16 ./ex19 -da_refine 6 -snes_monitor -ksp_monitor -snes_view
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 16 ./ex19 -da_refine 6 -snes_monitor -ksp_monitor -snes_view
runjob --block $COBALT_PARTNAME -p 16 --np 16 : ./ex19 -da_refine 6 -snes_monitor -ksp_monitor -snes_view
mpiexec -n 16 ./ex19 -da_refine 6 -snes_monitor -ksp_monitor -snes_view -pc_type mg
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 16 ./ex19 -da_refine 6 -snes_monitor -ksp_monitor -snes_view -pc_type mg
runjob --block $COBALT_PARTNAME -p 16 --np 16 : ./ex19 -da_refine 6 -snes_monitor -ksp_monitor -snes_view -pc_type mg
Note: this requires many fewer iterations than the default solver.
mpiexec -n 16 ./ex19 -da_refine 6 -snes_monitor -ksp_monitor -snes_view -pc_type hypre
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 16 ./ex19 -da_refine 6 -snes_monitor -ksp_monitor -snes_view -pc_type hypre
runjob --block $COBALT_PARTNAME -p 16 --np 16 : ./ex19 -da_refine 6 -snes_monitor -ksp_monitor -snes_view -pc_type hypre
Note: this requires many fewer iterations than the default solver, but more iterations than geometric multigrid.
mpiexec -n 4 ./ex19 -da_refine 6 -log_summary
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 4 ./ex19 -da_refine 6 -log_summary
runjob --block $COBALT_PARTNAME -p 16 --np 4 : ./ex19 -da_refine 6 -log_summary
Search for the line beginning with SNESSolve; the fourth column gives the time for the nonlinear solve.
mpiexec -n 4 ./ex19 -da_refine 6 -log_summary -pc_type mg
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 4 ./ex19 -da_refine 6 -log_summary -pc_type mg
runjob --block $COBALT_PARTNAME -p 16 --np 4 : ./ex19 -da_refine 6 -log_summary -pc_type mg
Compare the runtime for SNESSolve to the case with the default solver.
mpiexec -n 16 ./ex19 -da_refine 6 -log_summary
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 16 ./ex19 -da_refine 6 -log_summary
runjob --block $COBALT_PARTNAME -p 16 --np 16 : ./ex19 -da_refine 6 -log_summary
Compare the runtime for SNESSolve to the 4-processor case with the default solver. What is the speedup?
mpiexec -n 16 ./ex19 -da_refine 6 -log_summary -pc_type mg
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 16 ./ex19 -da_refine 6 -log_summary -pc_type mg
runjob --block $COBALT_PARTNAME -p 16 --np 16 : ./ex19 -da_refine 6 -log_summary -pc_type mg
Compare the runtime for SNESSolve to the 4-processor case with multigrid. What is the speedup? Why is the speedup for multigrid lower than the speedup for the default solver?
WHAT THIS EXAMPLE DEMONSTRATES:
FURTHER DETAILS:
DO THE FOLLOWING:
cd petsc/src/ksp/ksp/examples/tutorials
make ex42
mpiexec -n 4 ./ex42 -stokes_ksp_monitor
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 4 ./ex42 -stokes_ksp_monitor
runjob --block $COBALT_PARTNAME -p 16 --np 4 : ./ex42 -stokes_ksp_monitor
Note the poor convergence for even a very small problem.
mpiexec -n 4 ./ex42 -stokes_ksp_monitor -stokes_pc_type fieldsplit -stokes_pc_fieldsplit_type schur
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 4 ./ex42 -stokes_ksp_monitor -stokes_pc_type fieldsplit -stokes_pc_fieldsplit_type schur
runjob --block $COBALT_PARTNAME -p 16 --np 4 : ./ex42 -stokes_ksp_monitor -stokes_pc_type fieldsplit -stokes_pc_fieldsplit_type schur
mpiexec -n 16 ./ex42 -mx 40 -stokes_ksp_monitor -stokes_pc_type fieldsplit -stokes_pc_fieldsplit_type schur
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 16 ./ex42 -mx 40 -stokes_ksp_monitor -stokes_pc_type fieldsplit -stokes_pc_fieldsplit_type schur
runjob --block $COBALT_PARTNAME -p 16 --np 16 : ./ex42 -mx 40 -stokes_ksp_monitor -stokes_pc_type fieldsplit -stokes_pc_fieldsplit_type schur
WHAT THIS EXAMPLE DEMONSTRATES:
FURTHER DETAILS:
DO THE FOLLOWING:
cd petsc/src/ts/examples/tutorials
make ex11
mpiexec -n 1 ./ex11 -f ${PETSC_DIR}/share/petsc/datafiles/meshes/sevenside.exo
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 1 ./ex11 -f ${PETSC_DIR}/share/petsc/datafiles/meshes/sevenside.exo
runjob --block $COBALT_PARTNAME -p 16 --np 1 : ./ex11 -f ${PETSC_DIR}/share/petsc/datafiles/meshes/sevenside.exo
mpiexec -n 1 ./ex11 -f ${PETSC_DIR}/share/petsc/datafiles/meshes/sevenside.exo -ts_type rosw
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 1 ./ex11 -f ${PETSC_DIR}/share/petsc/datafiles/meshes/sevenside.exo -ts_type rosw
runjob --block $COBALT_PARTNAME -p 16 --np 1 : ./ex11 -f ${PETSC_DIR}/share/petsc/datafiles/meshes/sevenside.exo -ts_type rosw
mpiexec -n 16 ./ex11 -f ${PETSC_DIR}/share/petsc/datafiles/meshes/annulus-20.exo -monitor Error -advect_sol_type bump -petscfv_type leastsquares -petsclimiter_type sin
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 16 ./ex11 -f ${PETSC_DIR}/share/petsc/datafiles/meshes/annulus-20.exo -monitor Error -advect_sol_type bump -petscfv_type leastsquares -petsclimiter_type sin
runjob --block $COBALT_PARTNAME -p 16 --np 16 : ./ex11 -f ${PETSC_DIR}/share/petsc/datafiles/meshes/annulus-20.exo -monitor Error -advect_sol_type bump -petscfv_type leastsquares -petsclimiter_type sin
Compare to the error after turning off reconstruction.
mpiexec -n 4 ./ex11 -f ${PETSC_DIR}/share/petsc/datafiles/meshes/annulus-20.exo -physics sw -monitor Height,Energy -petscfv_type leastsquares -petsclimiter_type minmod
[Expected output]
qsub -t 10 -q ATPESC2015 -A ATPESC2015 --mode c16 -n 1 --proccount 4 ./ex11 -f ${PETSC_DIR}/share/petsc/datafiles/meshes/annulus-20.exo -physics sw -monitor Height,Energy -petscfv_type leastsquares -petsclimiter_type minmod
runjob --block $COBALT_PARTNAME -p 16 --np 4 : ./ex11 -f ${PETSC_DIR}/share/petsc/datafiles/meshes/annulus-20.exo -physics sw -monitor Height,Energy -petscfv_type leastsquares -petsclimiter_type minmod