From 34618cbee7faf03b3d8f3fdc57a8eb908fdb1363 Mon Sep 17 00:00:00 2001 From: Randy LeVeque Date: Thu, 7 Nov 2024 09:15:07 -0500 Subject: [PATCH 1/2] WIP: updated settings for PETSc / MPI for Bouss codes based on latest version of PETSc --- examples/bouss/petscMPIoptions | 32 +++++++++++-------------- examples/bouss/radial_flat/Makefile | 31 +++++++++++++++++-------- examples/bouss/setenv.sh | 36 ++++++++++++++++++++++++----- 3 files changed, 66 insertions(+), 33 deletions(-) diff --git a/examples/bouss/petscMPIoptions b/examples/bouss/petscMPIoptions index d545ac1c9..1657a75a8 100644 --- a/examples/bouss/petscMPIoptions +++ b/examples/bouss/petscMPIoptions @@ -10,33 +10,29 @@ # https://petsc.org/release/manualpages/PC/PCSetFromOptions/ # set min numbers of matrix rows per MPI rank (default is 10000) --pc_mpi_minimum_count_per_rank 5000 +-mpi_linear_solver_server_minimum_count_per_rank 5000 +-mpi_linear_solver_server +-mpi_linear_solver_server_view +-mpi_linear_solver_server_use_shared_memory false -# convergence criterion for linear solver (larger than PETSc default): --mpi_ksp_rtol 1.e-9 +# Krylov linear solver: +-ksp_type gmres +-ksp_max_it 200 +-ksp_reuse_preconditioner -# linear solver: --mpi_linear_solver_server --ksp_type preonly --mpi_ksp_type gmres --mpi_ksp_max_it 200 --mpi_ksp_reuse_preconditioner +# convergence criterion for linear solver (larger than PETSc default): +-ksp_rtol 1.e-9 # preconditioner: --pc_type mpi --mpi_pc_type gamg --mpi_pc_gamg_symmetrize_graph true --mpi_pc_gamg_sym_graph true --mpi_linear_solver_server_view +-pc_type gamg + # debug options: #-fp_trap off #-log_view #-start_in_debugger -debugger_rank 0 -#-mpi_ksp_view_mat binary -#-mpi_ksp_view_rhs binary -#-mpi_ksp_monitor_true_residual -#-mpi_ksp_monitor +#-ksp_monitor +#-ksp_view #-info # test if any options are not used: diff --git a/examples/bouss/radial_flat/Makefile b/examples/bouss/radial_flat/Makefile index 9d007c598..f6cd6fa3a 100644 --- 
a/examples/bouss/radial_flat/Makefile +++ b/examples/bouss/radial_flat/Makefile @@ -36,30 +36,36 @@ ifndef PETSC_DIR $(error PETSC_DIR not set) endif -ifndef PETSC_ARCH - $(error PETSC_ARCH not set) -endif - ifndef PETSC_OPTIONS PETSC_OPTIONS=MISSING $(error PETSC_OPTIONS must be declared as environment variable) endif +ifndef CLAW_MPIEXEC + CLAW_MPIEXEC=MISSING + $(error CLAW_MPIEXEC must be declared as environment variable) +endif + +ifndef CLAW_MPIFC + CLAW_MPIFC=MISSING + $(error Fortran compiler CLAW_MPIFC must be declared as environment variable) +endif + +# Environment variable FC should be set to fortran compiler, e.g. gfortran +FC = ${CLAW_MPIFC} + # How many MPI processes to use: BOUSS_MPI_PROCS ?= 6 EXE = $(PWD)/xgeoclaw -RUNEXE="${PETSC_DIR}/${PETSC_ARCH}/bin/mpiexec -n ${BOUSS_MPI_PROCS}" +#RUNEXE="${PETSC_DIR}/${PETSC_ARCH}/bin/mpiexec -n ${BOUSS_MPI_PROCS}" +RUNEXE="${CLAW_MPIEXEC} -n ${BOUSS_MPI_PROCS}" SETRUN_FILE = setrun.py # File containing function to make data OUTDIR = _output # Directory for output SETPLOT_FILE = setplot.py # File containing function to set plots PLOTDIR = _plots # Directory for plots -# Environment variable FC should be set to fortran compiler, e.g. 
gfortran - -FC = gfortran - # Some compiler flags below are needed for PETSc PETSC_INCLUDE = $(PETSC_DIR)/include $(PETSC_DIR)/$(PETSC_ARCH)/include INCLUDE += $(PETSC_INCLUDE) @@ -121,6 +127,13 @@ check: @env | grep PETSC_OPTIONS @echo PETSC_DIR = $(PETSC_DIR) @echo PETSC_ARCH = $(PETSC_ARCH) + @echo CLAW_MPIEXEC = $(CLAW_MPIEXEC) @echo RUNEXE = $(RUNEXE) + @echo EXE = $(EXE) + @echo CLAW_MPIFC = $(CLAW_MPIFC) + @echo FC = $(FC) @echo FFLAGS = $(FFLAGS) + @echo LFLAGS = $(LFLAGS) + @echo OUTDIR = $(OUTDIR) + @echo PLOTDIR = $(PLOTDIR) @echo =================== diff --git a/examples/bouss/setenv.sh b/examples/bouss/setenv.sh index e67f8c3cd..ab17e33bd 100644 --- a/examples/bouss/setenv.sh +++ b/examples/bouss/setenv.sh @@ -3,15 +3,39 @@ # to run the Bouss version of GeoClaw with MPI and OpenMP. # Adjust as needed for your system... -# You also need to set CLAW, FC, and perhaps PYTHONPATH - # For more information, see # https://www.clawpack.org/bouss2d.html # https://www.clawpack.org/setenv.html -export PETSC_DIR=/full/path/to/petsc -export PETSC_ARCH=arch-darwin-c-opt -export PETSC_OPTIONS="-options_file $CLAW/geoclaw/examples/bouss/petscMPIoptions" +# You also need to set CLAW and perhaps PYTHONPATH, see: +# https://www.clawpack.org/setenv.html + +echo CLAW is set to $CLAW + +# path to PETSc installation: +export PETSC_DIR=/full/path/to/petsc # NEED TO FIX! + +# PETSC_ARCH is only needed if PETSc is installed inside the PETSc directory. +# For PETSc installs by conda or package managers, it should not be set. +#export PETSC_ARCH= +export PETSC_ARCH=arch-darwin-c-opt # NEED TO FIX! 
+ +# You may want to use a different version of petscMPIoptions +# This setting uses the version in this directory: +export PETSC_OPTIONS="-options_file $PWD/petscMPIoptions" + export OMP_NUM_THREADS=6 -export BOUSS_MPI_PROCS=6 # only used in Clawpack Boussinesq example +export BOUSS_MPI_PROCS=6 + +# CLAW_MPIEXEC should be set to the command used to execute MPI code: +export CLAW_MPIEXEC=mpiexec +# set CLAW_MPIEXEC to mpiexec only if this command is defined in your shell, +# e.g. to use some version of MPI that was installed outside of PETSc. +# Or set to the full path to this command, e.g. for the PETSc version: +#export CLAW_MPIEXEC=$PETSC_DIR/$PETSC_ARCH/bin/mpiexec # requires PETSC_ARCH + +# set CLAW_MPIFC to the proper Fortran compiler to use for MPI code +# e.g. mpif90 if that is defined in your shell, or gfortran *might* work. +# This will overrule any FC environment variable. +export CLAW_MPIFC=mpif90 From 56e1fa8a091b1a4d90aeef1348c2b2edbfc25b1d Mon Sep 17 00:00:00 2001 From: Randy LeVeque Date: Thu, 7 Nov 2024 10:51:32 -0500 Subject: [PATCH 2/2] add full path to petscMPIoptions in setenv.sh --- examples/bouss/setenv.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/examples/bouss/setenv.sh b/examples/bouss/setenv.sh index ab17e33bd..2fe8876cd 100644 --- a/examples/bouss/setenv.sh +++ b/examples/bouss/setenv.sh @@ -22,7 +22,7 @@ export PETSC_ARCH=arch-darwin-c-opt # NEED TO FIX! # You may want to use a different version of petscMPIoptions # This setting uses the version in this directory: -export PETSC_OPTIONS="-options_file $PWD/petscMPIoptions" +export PETSC_OPTIONS="-options_file $CLAW/geoclaw/examples/bouss/petscMPIoptions" export OMP_NUM_THREADS=6 export BOUSS_MPI_PROCS=6 @@ -38,4 +38,3 @@ export CLAW_MPIEXEC=mpiexec # e.g. mpif90 if that is defined in your shell, or gfortran *might* work. # This will overrule any FC environment variable. export CLAW_MPIFC=mpif90 -