diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile index b3bd6a917a..9e2381268d 100644 --- a/ci/Jenkinsfile +++ b/ci/Jenkinsfile @@ -1,9 +1,10 @@ def Machine = 'none' def machine = 'none' def CUSTOM_WORKSPACE = 'none' -def cases = '' +def HOMEgfs = 'none' +def CI_CASES = '' def GH = 'none' -// Location of the custom workspaces for each machine in the CI system. They are persitent for each iteration of the PR. +// Location of the custom workspaces for each machine in the CI system. They are persistent for each iteration of the PR. def NodeName = [hera: 'Hera-EMC', orion: 'Orion-EMC', hercules: 'Hercules-EMC', gaea: 'Gaea'] def custom_workspace = [hera: '/scratch1/NCEPDEV/global/CI', orion: '/work2/noaa/stmp/CI/ORION', hercules: '/work2/noaa/global/CI/HERCULES', gaea: '/gpfs/f5/epic/proj-shared/global/CI'] def repo_url = 'git@github.com:NOAA-EMC/global-workflow.git' @@ -19,8 +20,8 @@ pipeline { } stages { // This initial stage is used to get the Machine name from the GitHub labels on the PR - // which is used to designate the Nodes in the Jenkins Controler by the agent label - // Each Jenknis Node is connected to said machine via an JAVA agent via an ssh tunnel + // which is used to designate the Nodes in the Jenkins Controller by the agent label + // Each Jenkins Node is connected to said machine via a Java agent via an ssh tunnel + no op 2 stage('1. Get Machine') { @@ -81,7 +82,9 @@ pipeline { properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hercules-EMC', 'Hera-EMC', 'Orion-EMC', 'Gaea'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])]) GH = sh(script: "which gh || echo '~/bin/gh'", returnStdout: true).trim() CUSTOM_WORKSPACE = "${WORKSPACE}" - sh(script: "mkdir -p ${CUSTOM_WORKSPACE}/RUNTESTS;rm -Rf ${CUSTOM_WORKSPACE}/RUNTESTS/*") + HOMEgfs = "${CUSTOM_WORKSPACE}/global-workflow" + sh(script: "rm -Rf ${CUSTOM_WORKSPACE}/global-workflow; mkdir -p ${CUSTOM_WORKSPACE}/global-workflow") + sh(script: "rm -Rf ${CUSTOM_WORKSPACE}/RUNTESTS; mkdir -p ${CUSTOM_WORKSPACE}/RUNTESTS") sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-Building" --remove-label "CI-${Machine}-Ready" """) } echo "Building and running on ${Machine} in directory ${CUSTOM_WORKSPACE}" @@ -90,94 +93,75 @@ pipeline { } stage('3. 
Build System') { - matrix { - agent { label NodeName[machine].toLowerCase() } - axes { - axis { - name 'system' - values 'gfs', 'gefs' - } - } - stages { - stage('Building') { - steps { - catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') { - script { - def HOMEgfs = "${CUSTOM_WORKSPACE}/${system}" // local HOMEgfs is used to build the system on per system basis under the custome workspace for each buile system - env.HOME_GFS = HOMEgfs // setting path in HOMEgfs as an environment variable HOME_GFS for some systems that using the path in its .bashrc - sh(script: "mkdir -p ${HOMEgfs}") - ws(HOMEgfs) { - if (fileExists("${HOMEgfs}/sorc/BUILT_semaphor")) { // if the system is already built, skip the build in the case of re-runs - sh(script: "cat ${HOMEgfs}/sorc/BUILT_semaphor", returnStdout: true).trim() // TODO: and user configurable control to manage build semphore - checkout scm - dir('sorc') { - sh(script: './link_workflow.sh') - } - } else { - try { - echo "Checking out the code for ${system} on ${Machine} using scm in ${HOMEgfs}" - checkout scm - } catch (Exception e) { - if (env.CHANGE_ID) { - sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Checkout **Failed** on ${Machine} in Build# ${env.BUILD_NUMBER}: ${e.getMessage()}" """) - } - STATUS = 'Failed' - error("Failed to checkout: ${e.getMessage()}") - } - def gist_url = "" - def error_logs = "" - def error_logs_message = "" - def builds_file = readYaml file: 'ci/cases/yamls/build.yaml' - def build_args_list = builds_file['builds'] - def build_args = build_args_list[system].join(' ').trim().replaceAll('null', '') - dir("${HOMEgfs}/sorc") { - try { - sh(script: "${build_args}") - } catch (Exception error_build) { - echo "Failed to build system: ${error_build.getMessage()}" - if ( fileExists("logs/error.logs") ) { - def fileContent = readFile 'logs/error.logs' - def lines = fileContent.readLines() - for (line in lines) { - echo "archiving: ${line}" - if (fileExists("${line}") && readFile("${line}").length() > 0 ){ - try { - archiveArtifacts artifacts: "${line}", fingerprint: true - error_logs = error_logs + "${HOMEgfs}/sorc/${line} " - error_logs_message = error_logs_message + "${HOMEgfs}/sorc/${line}\n" - } - catch (Exception error_arch) { echo "Failed to archive error log ${line}: ${error_arch.getMessage()}" } - } - } - try { - sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_BUILD_${env.CHANGE_ID}") - gist_url=sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_BUILD_${env.CHANGE_ID}", returnStdout: true).trim() - sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Build **FAILED** on **${Machine}** in Build# ${env.BUILD_NUMBER} with error logs:\n\\`\\`\\`\n${error_logs_message}\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """) - } catch (Exception error_comment) { - echo "Failed to comment on PR: ${error_comment.getMessage()}" - } - STATUS = 'Failed' - error("Failed to build system on ${Machine}") + agent { label NodeName[machine].toLowerCase() } + steps { + catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') { + script { + ws(HOMEgfs) { + echo "Checking out the code on ${Machine} using scm in ${HOMEgfs}" + try { + checkout scm + } catch (Exception e) { + echo "Failed to checkout the code on ${Machine} using scm in ${HOMEgfs}, try again ..." 
+ sleep time: 45, unit: 'SECONDS' + try { + checkout scm + } catch (Exception ee) { + echo "Failed to checkout the code on ${Machine} using scm in ${HOMEgfs}: ${ee.getMessage()}" + if (env.CHANGE_ID) { + sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Checkout **Failed** on ${Machine} in Build# ${env.BUILD_NUMBER}: ${ee.getMessage()}" """) + } + STATUS = 'Failed' + error("Failed to checkout: ${ee.getMessage()}") + } + } + def gist_url = "" + def error_logs = "" + def error_logs_message = "" + dir("${HOMEgfs}/sorc") { + try { + sh(script: './build_all.sh -kgu') // build the global-workflow executables for GFS variant (UFS-wx-model, WW3 pre/post executables) + sh(script: './build_ww3prepost.sh -w > ./logs/build_ww3prepost_gefs.log 2>&1') // build the WW3 pre/post processing executables for GEFS variant + sh(script: './build_ufs.sh -w -e gefs_model.x > ./logs/build_ufs_gefs.log 2>&1') // build the UFS-wx-model executable for GEFS variant + } catch (Exception error_build) { + echo "Failed to build global-workflow: ${error_build.getMessage()}" + if ( fileExists("logs/error.logs") ) { + def fileContent = readFile 'logs/error.logs' + def lines = fileContent.readLines() + for (line in lines) { + echo "archiving: ${line}" + if (fileExists("${line}") && readFile("${line}").length() > 0 ){ + try { + archiveArtifacts artifacts: "${line}", fingerprint: true + error_logs = error_logs + "${HOMEgfs}/sorc/${line} " + error_logs_message = error_logs_message + "${HOMEgfs}/sorc/${line}\n" } + catch (Exception error_arch) { echo "Failed to archive error log ${line}: ${error_arch.getMessage()}" } } - sh(script: './link_workflow.sh') - //sh(script: "echo ${HOMEgfs} > BUILT_semaphor") } - } - if (env.CHANGE_ID && system == 'gfs') { try { - sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-Running" --remove-label "CI-${Machine}-Building" """) - } catch (Exception e) { - echo "Failed to update label from Buildng to Running: ${e.getMessage()}" + sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_BUILD_${env.CHANGE_ID}") + gist_url=sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_BUILD_${env.CHANGE_ID}", returnStdout: true).trim() + sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Build **FAILED** on **${Machine}** in Build# ${env.BUILD_NUMBER} with error logs:\n\\`\\`\\`\n${error_logs_message}\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """) + } catch (Exception error_comment) { + echo "Failed to comment on PR: ${error_comment.getMessage()}" } + STATUS = 'Failed' + error("Failed to build global-workflow on ${Machine}") } - if (system == 'gfs') { - cases = sh(script: "${HOMEgfs}/ci/scripts/utils/get_host_case_list.py ${machine}", returnStdout: true).trim().split() - echo "Cases to run: ${cases}" - } - } + } + sh(script: './link_workflow.sh') + } + if (env.CHANGE_ID) { + try { + sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-Running" --remove-label "CI-${Machine}-Building" """) + } catch (Exception e) { + echo "Failed to update label from Building to Running: ${e.getMessage()}" + } } - } + // Get a list of CI cases to run + CI_CASES = sh(script: "${HOMEgfs}/ci/scripts/utils/get_host_case_list.py ${machine}", returnStdout: true).trim().split() + echo "Cases to run: ${CI_CASES}" } } } @@ -191,21 +175,16 @@ pipeline { agent { label NodeName[machine].toLowerCase() 
} steps { script { - def parallelStages = cases.collectEntries { caseName -> + def parallelStages = CI_CASES.collectEntries { caseName -> ["${caseName}": { stage("Create ${caseName}") { script { - sh(script: "sed -n '/{.*}/!p' ${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${caseName}.yaml > ${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${caseName}.yaml.tmp") - def yaml_case = readYaml file: "${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${caseName}.yaml.tmp" - def build_system = yaml_case.experiment.system - def HOMEgfs = "${CUSTOM_WORKSPACE}/${build_system}" // local HOMEgfs is used to populate the XML on per system basis - env.HOME_GFS = HOMEgfs // setting path in HOMEgfs as an environment variable HOME_GFS for some systems that using the path in its .bashrc env.RUNTESTS = "${CUSTOM_WORKSPACE}/RUNTESTS" try { error_output = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh create_experiment ${HOMEgfs}/ci/cases/pr/${caseName}.yaml", returnStdout: true).trim() } catch (Exception error_create) { - sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "${Case} **FAILED** to create experment on ${Machine} in BUILD# ${env.BUILD_NUMBER}\n with the error:\n\\`\\`\\`\n${error_output}\\`\\`\\`" """) - error("Case ${caseName} failed to create experment directory") + sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "${caseName} **FAILED** to create experiment on ${Machine} in BUILD# ${env.BUILD_NUMBER}\n with the error:\n\\`\\`\\`\n${error_output}\\`\\`\\`" """) + error("Case ${caseName} failed to create experiment directory") } } } @@ -213,15 +192,11 @@ pipeline { stage("Running ${caseName}") { catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') { script { - HOMEgfs = "${CUSTOM_WORKSPACE}/gfs" // common HOMEgfs is used to launch the scripts that run the experiments - env.HOME_GFS = HOMEgfs // setting path in HOMEgfs as an environment variable HOME_GFS for some systems that using the path in its .bashrc def pslot = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh get_pslot ${CUSTOM_WORKSPACE}/RUNTESTS ${caseName}", returnStdout: true).trim() def error_file = "${CUSTOM_WORKSPACE}/RUNTESTS/${pslot}_error.logs" sh(script: " rm -f ${error_file}") - def yaml_case = readYaml file: "${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${caseName}.yaml.tmp" - def build_system = yaml_case.experiment.system try { - sh(script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${CUSTOM_WORKSPACE} ${pslot} ${build_system}") + sh(script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${CUSTOM_WORKSPACE} ${pslot} 'global-workflow'") sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cleanup_experiment ${CUSTOM_WORKSPACE}/RUNTESTS/EXPDIR/${pslot}") } catch (Exception error_experment) { sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cancel_batch_jobs ${pslot}") @@ -251,7 +226,7 @@ pipeline { echo "Failed to comment on PR: ${error_comment.getMessage()}" } } else { - echo "No error logs found for failed cases in $CUSTOM_WORKSPACE/RUNTESTS/${pslot}_error.logs" + echo "No error logs found for failed cases in ${CUSTOM_WORKSPACE}/RUNTESTS/${pslot}_error.logs" } STATUS = 'Failed' try { @@ -273,11 +248,10 @@ pipeline { } } - stage( '5. FINALIZE' ) { + stage( '5. 
Finalize' ) { agent { label NodeName[machine].toLowerCase() } steps { script { - env.HOME_GFS = "${CUSTOM_WORKSPACE}/gfs" // setting path to HOMEgfs as an environment variable HOME_GFS for some systems that using the path in its .bashrc sh(script: """ labels=\$(${GH} pr view ${env.CHANGE_ID} --repo ${repo_url} --json labels --jq '.labels[].name') for label in \$labels; do @@ -298,7 +272,7 @@ pipeline { echo "Failed to remove custom work directory ${CUSTOM_WORKSPACE} on ${Machine}: ${e.getMessage()}" } } else { - echo "Failed to build and run Global-workflow in ${CUSTOM_WORKSPACE} and on ${Machine}" + echo "Failed to build and run global-workflow in ${CUSTOM_WORKSPACE} on ${Machine}" } } } diff --git a/ci/cases/pr/C48_S2SWA_gefs.yaml b/ci/cases/pr/C48_S2SWA_gefs.yaml index f39031f1a1..856759b46a 100644 --- a/ci/cases/pr/C48_S2SWA_gefs.yaml +++ b/ci/cases/pr/C48_S2SWA_gefs.yaml @@ -18,4 +18,4 @@ arguments: yaml: {{ HOMEgfs }}/ci/cases/yamls/gefs_defaults_ci.yaml skip_ci_on_hosts: - - wcoss2 + - None diff --git a/ci/cases/pr/C96_S2SWA_gefs_replay_ics.yaml b/ci/cases/pr/C96_S2SWA_gefs_replay_ics.yaml index 7118dde53f..b5c2742406 100644 --- a/ci/cases/pr/C96_S2SWA_gefs_replay_ics.yaml +++ b/ci/cases/pr/C96_S2SWA_gefs_replay_ics.yaml @@ -19,4 +19,4 @@ arguments: icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96mx100/20240610 skip_ci_on_hosts: - - wcoss2 + - None diff --git a/ci/cases/yamls/build.yaml b/ci/cases/yamls/build.yaml deleted file mode 100644 index 2b2938719f..0000000000 --- a/ci/cases/yamls/build.yaml +++ /dev/null @@ -1,3 +0,0 @@ -builds: - - gefs: './build_all.sh -kw' - - gfs: './build_all.sh -kgu' diff --git a/ci/scripts/utils/ci_utils_wrapper.sh b/ci/scripts/utils/ci_utils_wrapper.sh index 51c392fb99..a4fd1776c8 100755 --- a/ci/scripts/utils/ci_utils_wrapper.sh +++ b/ci/scripts/utils/ci_utils_wrapper.sh @@ -3,7 +3,7 @@ HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../.." >/dev/null 2>&1 && pwd )" source "${HOMEgfs}/ush/detect_machine.sh" -utitilty_function="${1}" +utility_function="${1}" source "${HOMEgfs}/ci/scripts/utils/ci_utils.sh" -${utitilty_function} "${@:2}" +${utility_function} "${@:2}" diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst index d57e7e9e92..43e2724dc6 100644 --- a/parm/config/gefs/config.fcst +++ b/parm/config/gefs/config.fcst @@ -51,7 +51,11 @@ export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_O export FORECASTSH="${SCRgfs}/exglobal_forecast.sh" #export FORECASTSH="${SCRgfs}/exglobal_forecast.py" # Temp. while this is worked on -export FCSTEXEC="ufs_model.x" +if [[ "${SFS_POST:-}" == "YES" ]]; then + export FCSTEXEC="sfs_model.x" +else + export FCSTEXEC="gefs_model.x" +fi ####################################################################### # Model configuration @@ -105,7 +109,7 @@ if (( gwd_opt == 2 )); then export do_gsl_drag_ls_bl=".true." export do_gsl_drag_ss=".false." export do_gsl_drag_tofd=".true." - export do_gwd_opt_psl=".true." + export do_gwd_opt_psl=".true." export do_ugwp_v1_orog_only=".false." launch_level=$(echo "${LEVS}/2.35" |bc) export launch_level @@ -133,7 +137,7 @@ if [[ "${progsigma}" == ".true." 
]]; then tbp="_progsigma" ; fi if [[ "${DO_AERO_FCST}" == "YES" ]]; then export IAER=2011 # spectral band mapping method for aerosol optical properties else - export IAER=1011 + export IAER=1011 fi export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst index c95f8796e5..5be4f4eca7 100644 --- a/parm/config/gfs/config.fcst +++ b/parm/config/gfs/config.fcst @@ -62,7 +62,7 @@ export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_O export FORECASTSH="${SCRgfs}/exglobal_forecast.sh" #export FORECASTSH="${SCRgfs}/exglobal_forecast.py" # Temp. while this is worked on -export FCSTEXEC="ufs_model.x" +export FCSTEXEC="gfs_model.x" ####################################################################### # Model configuration @@ -116,7 +116,7 @@ if (( gwd_opt == 2 )); then export do_gsl_drag_ls_bl=".true." export do_gsl_drag_ss=".false." export do_gsl_drag_tofd=".true." - export do_gwd_opt_psl=".true." + export do_gwd_opt_psl=".true." export do_ugwp_v1_orog_only=".false." launch_level=$(echo "${LEVS}/2.35" |bc) export launch_level @@ -148,7 +148,7 @@ if [[ "${progsigma}" == ".true." ]]; then tbp="_progsigma" ; fi if [[ "${DO_AERO_FCST}" == "YES" ]]; then export IAER=2011 # spectral band mapping method for aerosol optical properties else - export IAER=1011 + export IAER=1011 fi export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) diff --git a/scripts/exgfs_wave_post_pnt.sh b/scripts/exgfs_wave_post_pnt.sh index 06769303a1..96c60fc96c 100755 --- a/scripts/exgfs_wave_post_pnt.sh +++ b/scripts/exgfs_wave_post_pnt.sh @@ -271,18 +271,18 @@ source "${USHgfs}/preamble.sh" rm -f buoy_tmp.loc buoy_log.ww3 ww3_oup.inp ${NLN} ./out_pnt.${waveuoutpGRD} ./out_pnt.ww3 ${NLN} ./mod_def.${waveuoutpGRD} ./mod_def.ww3 - export pgm=ww3_outp;. prep_step - ${EXECgfs}/ww3_outp > buoy_lst.loc 2>&1 - export err=$?;err_chk + export pgm="${NET,,}_ww3_outp.x" + source prep_step + "${EXECgfs}/${pgm}" > buoy_lst.loc 2>&1 + export err=$?;err_chk if [ "$err" != '0' ] && [ ! 
-f buoy_log.ww3 ] then - pgm=wave_post set +x echo ' ' echo '******************************************** ' - echo '*** FATAL ERROR : ERROR IN ww3_outp *** ' + echo "*** FATAL ERROR : ERROR IN ${pgm} *** " echo '******************************************** ' echo ' ' cat buoy_tmp.loc diff --git a/sorc/build_all.sh b/sorc/build_all.sh index e75c853c39..9414846f2a 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -56,6 +56,7 @@ _wave_opt="" _hydro_opt="" _build_job_max=20 _quick_kill="NO" +_ufs_exec="-e gfs_model.x" # Reset option counter in case this script is sourced OPTIND=1 while getopts ":a:dfghj:kuvwy" option; do @@ -69,8 +70,8 @@ while getopts ":a:dfghj:kuvwy" option; do k) _quick_kill="YES" ;; u) _build_ufsda="YES" ;; v) _verbose_opt="-v";; - w) _wave_opt="-w";; - y) _hydro_opt="-y";; + w) _wave_opt="-w"; _ufs_exec="-e gefs_model.x";; + y) _hydro_opt="-y"; _ufs_exec="-e sfs_model.x";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" _usage @@ -133,7 +134,7 @@ declare -A build_opts big_jobs=0 build_jobs["ufs"]=8 big_jobs=$((big_jobs+1)) -build_opts["ufs"]="${_wave_opt} ${_hydro_opt} ${_verbose_opt} ${_build_ufs_opt} ${_build_debug}" +build_opts["ufs"]="${_wave_opt} ${_hydro_opt} ${_verbose_opt} ${_build_ufs_opt} ${_build_debug} ${_ufs_exec}" build_jobs["upp"]=1 build_opts["upp"]="${_build_debug}" diff --git a/sorc/build_ufs.sh b/sorc/build_ufs.sh index 6432962a5a..773c104be3 100755 --- a/sorc/build_ufs.sh +++ b/sorc/build_ufs.sh @@ -8,8 +8,9 @@ APP="S2SWA" CCPP_SUITES="FV3_GFS_v17_p8_ugwpv1,FV3_GFS_v17_coupled_p8_ugwpv1,FV3_global_nest_v1" # TODO: does the g-w need to build with all these CCPP_SUITES? PDLIB="ON" HYDRO="OFF" +EXEC_NAME="gfs_model.x" -while getopts ":da:fj:vwy" option; do +while getopts ":da:fj:e:vwy" option; do case "${option}" in d) BUILD_TYPE="Debug";; a) APP="${OPTARG}";; @@ -18,6 +19,7 @@ while getopts ":da:fj:vwy" option; do v) export BUILD_VERBOSE="YES";; w) PDLIB="OFF";; y) HYDRO="ON";; + e) EXEC_NAME="${OPTARG}";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" ;; @@ -40,13 +42,20 @@ if [[ ${BUILD_TYPE:-"Release"} = "DEBUG" ]] ; then elif [[ "${FASTER:-OFF}" == ON ]] ; then MAKE_OPT+=" -DFASTER=ON" fi -COMPILE_NR=0 + +case "${EXEC_NAME}" in + "ufs_model.x") COMPILE_ID=0 ;; + "gfs_model.x") COMPILE_ID=1 ;; + "gefs_model.x") COMPILE_ID=2 ;; + "sfs_model.x") COMPILE_ID=3 ;; + *) echo "Unsupported executable name: ${EXEC_NAME}"; exit 1 ;; +esac CLEAN_BEFORE=YES CLEAN_AFTER=NO -BUILD_JOBS=${BUILD_JOBS:-8} ./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "intel" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" -mv "./tests/fv3_${COMPILE_NR}.exe" ./tests/ufs_model.x -mv "./tests/modules.fv3_${COMPILE_NR}.lua" ./tests/modules.ufs_model.lua -cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua +BUILD_JOBS=${BUILD_JOBS:-8} ./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_ID}" "intel" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" +mv "./tests/fv3_${COMPILE_ID}.exe" "./tests/${EXEC_NAME}" +if [[ ! -f "./tests/modules.ufs_model.lua" ]]; then mv "./tests/modules.fv3_${COMPILE_ID}.lua" "./tests/modules.ufs_model.lua"; fi +if [[ ! -f "./tests/ufs_common.lua" ]]; then cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua; fi exit 0 diff --git a/sorc/build_ww3prepost.sh b/sorc/build_ww3prepost.sh index 67ee5e1dc2..7b3537db84 100755 --- a/sorc/build_ww3prepost.sh +++ b/sorc/build_ww3prepost.sh @@ -1,69 +1,56 @@ #! 
/usr/bin/env bash set -x -script_dir=$(dirname "${BASH_SOURCE[0]}") -cd "${script_dir}" || exit 1 +# shellcheck disable=SC2312 +_HOMEgfs=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}" )" )/.." && pwd -P) +cd "${_HOMEgfs}/sorc" || exit 1 # Default settings -APP="S2SWA" PDLIB="ON" while getopts ":j:a:dvw" option; do case "${option}" in - a) APP="${OPTARG}";; - d) BUILD_TYPE="Debug";; - j) BUILD_JOBS="${OPTARG}";; - v) export BUILD_VERBOSE="YES";; - w) PDLIB="OFF";; - :) - echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" - usage - ;; - *) - echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" - usage - ;; + d) BUILD_TYPE="Debug" ;; + j) BUILD_JOBS="${OPTARG}" ;; + v) export BUILD_VERBOSE="YES" ;; + w) PDLIB="OFF" ;; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + usage + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + usage + ;; esac done -# Determine which switch to use -if [[ "${APP}" == "ATMW" ]]; then - ww3switch="model/esmf/switch" -else - if [[ "${PDLIB}" == "ON" ]]; then - ww3switch="model/bin/switch_meshcap_pdlib" - else - ww3switch="model/bin/switch_meshcap" - fi -fi - -# Check final exec folder exists -if [[ ! -d "../exec" ]]; then - mkdir ../exec -fi - -finalexecdir="$( pwd -P )/../exec" - -#Determine machine and load modules +# Determine machine and load modules set +x -source "${script_dir}/ufs_model.fd/tests/detect_machine.sh" -source "${script_dir}/ufs_model.fd/tests/module-setup.sh" -module use "${script_dir}/ufs_model.fd/modulefiles" +source "${_HOMEgfs}/sorc/ufs_model.fd/tests/detect_machine.sh" +source "${_HOMEgfs}/sorc/ufs_model.fd/tests/module-setup.sh" +module use "${_HOMEgfs}/sorc/ufs_model.fd/modulefiles" module load "ufs_${MACHINE_ID}.intel" set -x -#Set WW3 directory, switch, prep and post exes -cd ufs_model.fd/WW3 || exit 1 -WW3_DIR=$( pwd -P ) +#Set WW3 directory +cd "${_HOMEgfs}/sorc/ufs_model.fd/WW3" || exit 1 +WW3_DIR=$(pwd -P) export WW3_DIR -export SWITCHFILE="${WW3_DIR}/${ww3switch}" -# Build exes for prep jobs and post jobs: -prep_exes="ww3_grid ww3_prep ww3_prnc ww3_grid" -post_exes="ww3_outp ww3_outf ww3_outp ww3_gint ww3_ounf ww3_ounp ww3_grib" +# Determine which switch to use +if [[ "${PDLIB}" == "ON" ]]; then + ww3switch="model/bin/switch_meshcap_pdlib" + path_build="${WW3_DIR}/build/pdlib_ON" + path_install="${WW3_DIR}/install/pdlib_ON" +else + ww3switch="model/bin/switch_meshcap" + path_build="${WW3_DIR}/build/pdlib_OFF" + path_install="${WW3_DIR}/install/pdlib_OFF" +fi +export SWITCHFILE="${WW3_DIR}/${ww3switch}" #create build directory: -path_build="${WW3_DIR}/build_SHRD" [[ -d "${path_build}" ]] && rm -rf "${path_build}" mkdir -p "${path_build}" || exit 1 cd "${path_build}" || exit 1 @@ -71,61 +58,49 @@ echo "Forcing a SHRD build" buildswitch="${path_build}/switch" -cat "${SWITCHFILE}" > "${path_build}/tempswitch" - -sed -e "s/DIST/SHRD/g"\ - -e "s/OMPG / /g"\ - -e "s/OMPH / /g"\ - -e "s/MPIT / /g"\ - -e "s/MPI / /g"\ - -e "s/B4B / /g"\ - -e "s/PDLIB / /g"\ - -e "s/SCOTCH / /g"\ - -e "s/METIS / /g"\ - -e "s/NOGRB/NCEP2/g"\ - "${path_build}/tempswitch" > "${path_build}/switch" +cat "${SWITCHFILE}" >"${path_build}/tempswitch" + +sed -e "s/DIST/SHRD/g" \ + -e "s/OMPG / /g" \ + -e "s/OMPH / /g" \ + -e "s/MPIT / /g" \ + -e "s/MPI / /g" \ + -e "s/B4B / /g" \ + -e "s/PDLIB / /g" \ + -e "s/SCOTCH / /g" \ + -e "s/METIS / /g" \ + -e "s/NOGRB/NCEP2/g" \ + "${path_build}/tempswitch" >"${path_build}/switch" rm "${path_build}/tempswitch" echo "Switch file is ${buildswitch} with switches:" cat 
"${buildswitch}" #define cmake build options -MAKE_OPT="-DCMAKE_INSTALL_PREFIX=install" +MAKE_OPT="-DCMAKE_INSTALL_PREFIX=${path_install}" [[ ${BUILD_TYPE:-"Release"} = "Debug" ]] && MAKE_OPT+=" -DCMAKE_BUILD_TYPE=Debug" #Build executables: # shellcheck disable=SC2086 cmake "${WW3_DIR}" -DSWITCH="${buildswitch}" ${MAKE_OPT} rc=$? -if (( rc != 0 )); then +if ((rc != 0)); then echo "Fatal error in cmake." exit "${rc}" fi + make -j "${BUILD_JOBS:-8}" rc=$? -if (( rc != 0 )); then +if ((rc != 0)); then echo "Fatal error in make." exit "${rc}" fi + make install -if (( rc != 0 )); then +rc=$? +if ((rc != 0)); then echo "Fatal error in make install." exit "${rc}" fi -# Copy to top-level exe directory -for prog in ${prep_exes} ${post_exes}; do - cp "${path_build}/install/bin/${prog}" "${finalexecdir}/" - rc=$? - if (( rc != 0 )); then - echo "FATAL: Unable to copy ${path_build}/${prog} to ${finalexecdir} (Error code ${rc})" - exit "${rc}" - fi -done - -#clean-up build directory: -echo "executables are in ${finalexecdir}" -echo "cleaning up ${path_build}" -rm -rf "${path_build}" - exit 0 diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index a6d55d462c..ecbad78693 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -2,11 +2,11 @@ #--make symbolic links for EMC installation and hardcopies for NCO delivery -HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." >/dev/null 2>&1 && pwd )" +HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." >/dev/null 2>&1 && pwd)" TRACE=NO source "${HOMEgfs}/ush/preamble.sh" function usage() { - cat << EOF + cat < "grib2_${grdnam}_${FH3}.out" 2>&1 + "${EXECgfs}/${pgm}" > "grib2_${grdnam}_${FH3}.out" 2>&1 export err=$?;err_chk - if [ ! -s gribfile ]; then set +x echo ' ' echo '************************************************ ' - echo '*** FATAL ERROR : ERROR IN ww3_grib encoding *** ' + echo "*** FATAL ERROR : ERROR IN ${pgm} encoding *** " echo '************************************************ ' echo ' ' set_trace exit 3 fi - if (( fhr > 0 )); then + if (( fhr > 0 )); then ${WGRIB2} gribfile -set_date "${PDY}${cyc}" -set_ftime "${fhr} hour fcst" -grib "${COMOUT_WAVE_GRID}/${outfile}" err=$? else @@ -169,7 +170,7 @@ if [[ ! -s "${COMOUT_WAVE_GRID}/${outfile}.idx" ]]; then set +x echo ' ' echo '********************************************* ' - echo '*** FATAL ERROR : ERROR IN ww3_grib2 *** ' + echo "*** FATAL ERROR : ERROR IN ${pgm} *** " # FIXME: This is not an error in $pgm, but in WGRIB2 echo '********************************************* ' echo ' ' set_trace @@ -197,7 +198,7 @@ if [[ ! -s "${COMOUT_WAVE_GRID}/${outfile}.idx" ]]; then set +x echo ' ' echo '********************************************* ' - echo '*** FATAL ERROR : ERROR IN ww3_grib2 *** ' + echo "*** FATAL ERROR : ERROR IN ${pgm} *** " echo '********************************************* ' echo ' ' echo " Error in moving grib file ${outfile} to com" @@ -209,7 +210,7 @@ if [[ ! 
-s "${COMOUT_WAVE_GRID}/${outfile}.idx" ]]; then set +x echo ' ' echo '*************************************************** ' - echo '*** FATAL ERROR : ERROR IN ww3_grib2 INDEX FILE *** ' + echo "*** FATAL ERROR : ERROR IN ${pgm} INDEX FILE *** " echo '*************************************************** ' echo ' ' echo " Error in moving grib file ${outfile}.idx to com" diff --git a/ush/wave_grid_interp_sbs.sh b/ush/wave_grid_interp_sbs.sh index 31b7808c16..d56c5d0763 100755 --- a/ush/wave_grid_interp_sbs.sh +++ b/ush/wave_grid_interp_sbs.sh @@ -133,17 +133,18 @@ source "${USHgfs}/preamble.sh" # 1.b Run interpolation code + export pgm="${NET,,}_ww3_gint.x" + source prep_step + set +x - echo " Run ww3_gint - echo " Executing ${EXECgfs}/ww3_gint + echo " Executing ${pgm}" set_trace - export pgm=ww3_gint;. prep_step - ${EXECgfs}/ww3_gint 1> gint.${grdID}.out 2>&1 + "${EXECgfs}/${pgm}" 1> gint.${grdID}.out 2>&1 export err=$?;err_chk # Write interpolation file to main TEMP dir area if not there yet - if [ "wht_OK" = 'no' ] + if [ "wht_OK" = 'no' ] # FIXME: This is never going to evaluate to true, wht_OK is a string and needs to be ${wht_OK}. With ${wht_OK}, the next line is trying to copy into ${FIXgfs} space. This leads to a Permission denied error. The logic here needs to be evaluated and recoded. #TODO then cp -f ./WHTGRIDINT.bin ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID} cp -f ./WHTGRIDINT.bin ${FIXgfs}/wave/ww3_gint.WHTGRIDINT.bin.${grdID} @@ -155,7 +156,7 @@ source "${USHgfs}/preamble.sh" set +x echo ' ' echo '*************************************************** ' - echo '*** FATAL ERROR : ERROR IN ww3_gint interpolation * ' + echo "*** FATAL ERROR : ERROR IN ${pgm} interpolation * " echo '*************************************************** ' echo ' ' set_trace diff --git a/ush/wave_grid_moddef.sh b/ush/wave_grid_moddef.sh index 1e8c44054a..dd0ee204f6 100755 --- a/ush/wave_grid_moddef.sh +++ b/ush/wave_grid_moddef.sh @@ -1,5 +1,5 @@ #! /usr/bin/env bash - + ################################################################################ # # UNIX Script Documentation Block @@ -72,27 +72,28 @@ source "${USHgfs}/preamble.sh" fi # --------------------------------------------------------------------------- # -# 2. Create mod_def file +# 2. Create mod_def file set +x echo ' ' echo ' Creating mod_def file ...' - echo " Executing ${EXECgfs}/ww3_grid" + echo " Executing ${EXECgfs}/${NET,,}_ww3_grid.x" echo ' ' set_trace - - rm -f ww3_grid.inp + + rm -f ww3_grid.inp ${NLN} ../ww3_grid.inp.$grdID ww3_grid.inp if [ -f ../${grdID}.msh ] then - rm -f ${grdID}.msh + rm -f ${grdID}.msh ${NLN} ../${grdID}.msh ${grdID}.msh fi - - "${EXECgfs}/ww3_grid" 1> "grid_${grdID}.out" 2>&1 + export pgm="${NET,,}_ww3_grid.x" + + "${EXECgfs}/${pgm}" 1> "grid_${grdID}.out" 2>&1 err=$? 
if [ "$err" != '0' ] @@ -100,13 +101,13 @@ source "${USHgfs}/preamble.sh" set +x echo ' ' echo '******************************************** ' - echo '*** FATAL ERROR : ERROR IN ww3_grid *** ' + echo "*** FATAL ERROR : ERROR IN ${pgm} *** " echo '******************************************** ' echo ' ' set_trace exit 3 fi - + if [[ -f mod_def.ww3 ]] then cp mod_def.ww3 "${COMOUT_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" diff --git a/ush/wave_outp_spec.sh b/ush/wave_outp_spec.sh index 37accbae49..929979bbf9 100755 --- a/ush/wave_outp_spec.sh +++ b/ush/wave_outp_spec.sh @@ -161,21 +161,21 @@ source "${USHgfs}/preamble.sh" # 2.b Run the postprocessor + export pgm="${NET,,}_ww3_outp.x" + source prep_step + set +x - echo " Executing ${EXECgfs}/ww3_outp" + echo " Executing ${EXECgfs}/${pgm}" set_trace - export pgm=ww3_outp;. prep_step - ${EXECgfs}/ww3_outp 1> outp_${specdir}_${buoy}.out 2>&1 + "${EXECgfs}/${pgm}" 1> outp_${specdir}_${buoy}.out 2>&1 export err=$?;err_chk - - if [ "$err" != '0' ] then set +x echo ' ' echo '******************************************** ' - echo '*** FATAL ERROR : ERROR IN ww3_outp *** ' + echo "*** FATAL ERROR : ERROR IN ${pgm} *** " echo '******************************************** ' echo ' ' set_trace @@ -219,7 +219,7 @@ source "${USHgfs}/preamble.sh" set +x echo ' ' echo '***************************************************************** ' - echo '*** FATAL ERROR : OUTPUT DATA FILE FOR BOUY $bouy NOT FOUND *** ' + echo "*** FATAL ERROR : OUTPUT DATA FILE FOR BUOY ${buoy} NOT FOUND *** " echo '***************************************************************** ' echo ' ' set_trace diff --git a/ush/wave_prnc_cur.sh b/ush/wave_prnc_cur.sh index 927710c581..cfd499330f 100755 --- a/ush/wave_prnc_cur.sh +++ b/ush/wave_prnc_cur.sh @@ -74,22 +74,22 @@ rm -f cur.nc ${NLN} "cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc" "cur.nc" ${NLN} "${DATA}/mod_def.${WAVECUR_FID}" ./mod_def.ww3 -export pgm=ww3_prnc;. 
prep_step + export pgm="${NET,,}_ww3_prnc.x" + source prep_step - ${EXECgfs}/ww3_prnc 1> prnc_${WAVEICE_FID}_${cycle}.out 2>&1 + "${EXECgfs}/${pgm}" 1> prnc_${WAVEICE_FID}_${cycle}.out 2>&1 export err=$?; err_chk - if [ "$err" != '0' ] then - cat prnc_${WAVEICE_FID}_${cycle}.out + cat prnc_${WAVEICE_FID}_${cycle}.out set +x echo ' ' echo '******************************************** ' @@ -170,13 +170,13 @@ source "${USHgfs}/preamble.sh" # and only WAV_MOD_ID if WW3ATMIENS=F # if [ "${WW3ATMIENS}" = "T" ] - then + then icefile=${WAV_MOD_TAG}.${WAVEICE_FID}.$cycle.ice elif [ "${WW3ATMIENS}" = "F" ] - then + then icefile=${RUN}wave.${WAVEICE_FID}.$cycle.ice fi - + set +x echo " Saving ice.ww3 as ${COMOUT_WAVE_PREP}/${icefile}" set_trace
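For reference, a minimal sketch of how the renamed model executables are produced under this change, mirroring the commands the updated Jenkinsfile Build stage runs from sorc/; the shebang, strict-mode line, and the cd are additions for the sketch, the build commands and log paths are taken from the patch:

#!/usr/bin/env bash
set -euo pipefail                      # sketch only: stop on the first failed build step
cd "${HOMEgfs:?}/sorc"                 # HOMEgfs is the path to the global-workflow clone
# GFS variant: build_all.sh now passes "-e gfs_model.x" to build_ufs.sh by default
./build_all.sh -kgu
# GEFS variant: "-w" selects the PDLIB=OFF WW3 switch and the gefs_model.x executable
./build_ww3prepost.sh -w > ./logs/build_ww3prepost_gefs.log 2>&1
./build_ufs.sh -w -e gefs_model.x > ./logs/build_ufs_gefs.log 2>&1
# Link fix files and the newly named executables into exec/
./link_workflow.sh

Each executable name maps to its own COMPILE_ID in build_ufs.sh, so the GFS, GEFS, and SFS binaries can coexist in the same checkout instead of overwriting a single ufs_model.x.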