diff --git a/.gitignore b/.gitignore
index f3cb1e1b3e..ce64a5ba48 100644
--- a/.gitignore
+++ b/.gitignore
@@ -81,6 +81,7 @@ parm/ufs/MOM6_data_table.IN
 parm/ufs/ice_in.IN
 parm/ufs/ufs.configure.*.IN
 parm/ufs/post_itag_gfs
+parm/ufs/ww3_shel.nml.IN
 parm/wafs

 # Ignore sorc and logs folders from externals
diff --git a/docs/source/wave.rst b/docs/source/wave.rst
index 56aa34ce3b..52a984b6b3 100644
--- a/docs/source/wave.rst
+++ b/docs/source/wave.rst
@@ -21,6 +21,8 @@ Here are several regional naming conventions:
 +===========+=======================+
 | glo       | Global domain         |
 +-----------+-----------------------+
+| uglo      | Unstructured global   |
++-----------+-----------------------+
 | ak        | Alaska                |
 +-----------+-----------------------+
 | ao or aoc | Arctic Ocean          |
diff --git a/parm/archive/gdaswave.yaml.j2 b/parm/archive/gdaswave.yaml.j2
index 220770b38d..109c70c181 100644
--- a/parm/archive/gdaswave.yaml.j2
+++ b/parm/archive/gdaswave.yaml.j2
@@ -1,5 +1,5 @@
 gdaswave:
-    {% set head = "gdas.wave.t" + cycle_HH + "z." %}
+    {% set head = "gdaswave.t" + cycle_HH + "z." %}
     name: "GDASWAVE"
     target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdaswave.tar"
     required:
diff --git a/parm/config/gefs/config.ufs b/parm/config/gefs/config.ufs
index c46023aff6..b0f430b91a 100644
--- a/parm/config/gefs/config.ufs
+++ b/parm/config/gefs/config.ufs
@@ -512,9 +512,7 @@ fi

 # Set the name of the UFS (previously nems) configure template to use
 # Default ufs.configure templates for supported model configurations
-if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then
-  tmpl_suffix="_esmf"
-fi
+# WW3 restart field variable is different for slow vs fast loop. Add WW3_RSTFLDS="ice" for slow loop variables based on coupling scheme.
 case "${model_list}" in
   atm)
     default_template="${PARMgfs}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN"
@@ -533,9 +531,11 @@ case "${model_list}" in
     ;;
   atm.ocean.ice.wave)
     default_template="${PARMgfs}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN"
+    WW3_RSTFLDS="ice"
     ;;
   atm.ocean.ice.wave.aero)
     default_template="${PARMgfs}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN"
+    WW3_RSTFLDS="ice"
     ;;
   *)
     echo "FATAL ERROR: Unsupported UFSWM configuration for ${model_list}"
@@ -547,6 +547,9 @@ esac
 export ufs_configure_template=${ufs_configure_template:-${default_template:-"/dev/null"}}
 unset model_list default_template

+#export wave restart variable:
+export WW3_RSTFLDS=${WW3_RSTFLDS:-" "}
+
 if [[ ! -r "${ufs_configure_template}" ]]; then
   echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable."
   exit 17
diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base
index 4781f97274..fbbe5c782f 100644
--- a/parm/config/gfs/config.base
+++ b/parm/config/gfs/config.base
@@ -219,7 +219,7 @@ case "${CASE}" in
         export waveGRD='uglo_100km'
         ;;
     "C768" | "C1152")
-        export waveGRD='uglo_m1g16'
+        export waveGRD='uglo_15km'
         ;;
     *)
         echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!"
diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs
index 9737404dd1..cc69d0f6a7 100644
--- a/parm/config/gfs/config.ufs
+++ b/parm/config/gfs/config.ufs
@@ -15,7 +15,7 @@ if (( $# <= 1 )); then
   echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072"
   echo "--mom6 500|100|025"
   echo "--cice6 500|100|025"
-  echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_100|glo_200|glo_500|mx025|uglo_100km|uglo_m1g16"
+  echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_100|glo_200|glo_500|mx025|uglo_100km"
   echo "--gocart"

   exit 1
@@ -605,12 +605,12 @@ if [[ "${skip_ww3}" == "false" ]]; then
     "mx025")
       ntasks_ww3=80
       ;;
-    "uglo_100km")
-      ntasks_ww3=40
+    "uglo_15km")
+      ntasks_ww3=1000
       nthreads_ww3=1
       ;;
-    "uglo_m1g16")
-      ntasks_ww3=1000
+    "uglo_100km")
+      ntasks_ww3=40
       nthreads_ww3=1
       ;;
     *)
@@ -630,9 +630,7 @@ fi

 # Set the name of the UFS (previously nems) configure template to use
 # Default ufs.configure templates for supported model configurations
-if [[ "${USE_ESMF_THREADING:-}" == "YES" ]]; then
-  tmpl_suffix="_esmf"
-fi
+# WW3 restart field variable is different for slow vs fast loop. Add WW3_RSTFLDS="ice" for slow loop variables based on coupling scheme.
 case "${model_list}" in
   atm)
     default_template="${PARMgfs}/ufs/ufs.configure.atm${tmpl_suffix:-}.IN"
@@ -651,9 +649,11 @@ case "${model_list}" in
     ;;
   atm.ocean.ice.wave)
     default_template="${PARMgfs}/ufs/ufs.configure.s2sw${tmpl_suffix:-}.IN"
+    WW3_RSTFLDS="ice"
     ;;
   atm.ocean.ice.wave.aero)
     default_template="${PARMgfs}/ufs/ufs.configure.s2swa${tmpl_suffix:-}.IN"
+    WW3_RSTFLDS="ice"
     ;;
   *)
     echo "FATAL ERROR: Unsupported UFSWM configuration for ${model_list}"
@@ -665,6 +665,9 @@ esac
 export ufs_configure_template=${ufs_configure_template:-${default_template:-"/dev/null"}}
 unset model_list default_template

+# export wave restart variable:
+export WW3_RSTFLDS=${WW3_RSTFLDS:-" "}
+
 if [[ ! -r "${ufs_configure_template}" ]]; then
   echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable."
   exit 17
diff --git a/parm/config/gfs/config.wave b/parm/config/gfs/config.wave
index e792f45473..722e1122e4 100644
--- a/parm/config/gfs/config.wave
+++ b/parm/config/gfs/config.wave
@@ -56,18 +56,18 @@ case "${waveGRD}" in
     export wavepostGRD='glo_500'
     export waveuoutpGRD=${waveGRD}
     ;;
+  "uglo_15km")
+    # unstructured 15km grid
+    export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m'
+    export wavepostGRD=''
+    export waveuoutpGRD=${waveGRD}
+    ;;
   "uglo_100km")
     #unstructured 100km grid
     export waveinterpGRD='glo_200'
     export wavepostGRD=''
     export waveuoutpGRD=${waveGRD}
     ;;
-  "uglo_m1g16")
-    #unstructured m1v16 grid
-    export waveinterpGRD='glo_15mxt'
-    export wavepostGRD=''
-    export waveuoutpGRD=${waveGRD}
-    ;;
   *)
     echo "FATAL ERROR: No grid specific wave config values exist for ${waveGRD}. Aborting."
     exit 1
diff --git a/parm/wave/ak_10m_interp.inp.tmpl b/parm/wave/ak_10m_interp.inp.tmpl
index 225ab3628d..c397e17e5d 100755
--- a/parm/wave/ak_10m_interp.inp.tmpl
+++ b/parm/wave/ak_10m_interp.inp.tmpl
@@ -3,10 +3,9 @@ $------------------------------------------------
 $ Start Time DT NSteps
   TIME DT NSTEPS
 $ Total number of grids
-  3
+  2
 $ Grid extensions
-  'gnh_10m'
-  'aoc_9km'
+  'uglo_15km'
   'ak_10m'
 $
   0
diff --git a/parm/wave/at_10m_interp.inp.tmpl b/parm/wave/at_10m_interp.inp.tmpl
index 6f4c1f7099..903f49504f 100755
--- a/parm/wave/at_10m_interp.inp.tmpl
+++ b/parm/wave/at_10m_interp.inp.tmpl
@@ -5,7 +5,7 @@ $ Start Time DT NSteps
 $ Total number of grids
   2
 $ Grid extensions
-  'uglo_m1g16'
+  'uglo_15km'
   'at_10m'
 $
   0
diff --git a/parm/wave/ep_10m_interp.inp.tmpl b/parm/wave/ep_10m_interp.inp.tmpl
index 23cfd50c2e..292522325c 100755
--- a/parm/wave/ep_10m_interp.inp.tmpl
+++ b/parm/wave/ep_10m_interp.inp.tmpl
@@ -5,7 +5,7 @@ $ Start Time DT NSteps
 $ Total number of grids
   2
 $ Grid extensions
-  'uglo_m1g16'
+  'uglo_15km'
   'ep_10m'
 $
   0
diff --git a/parm/wave/glo_15mxt_interp.inp.tmpl b/parm/wave/glo_15mxt_interp.inp.tmpl
index 19e9dae684..e9ba00ef23 100755
--- a/parm/wave/glo_15mxt_interp.inp.tmpl
+++ b/parm/wave/glo_15mxt_interp.inp.tmpl
@@ -5,7 +5,7 @@ $ Start Time DT NSteps
 $ Total number of grids
   2
 $ Grid extensions
-  'uglo_m1g16'
+  'uglo_15km'
   'glo_15mxt'
 $
   0
diff --git a/parm/wave/glo_30m_interp.inp.tmpl b/parm/wave/glo_30m_interp.inp.tmpl
index c62881202c..611907fb15 100755
--- a/parm/wave/glo_30m_interp.inp.tmpl
+++ b/parm/wave/glo_30m_interp.inp.tmpl
@@ -5,7 +5,7 @@ $ Start Time DT NSteps
 $ Total number of grids
   2
 $ Grid extensions
-  'uglo_m1g16'
+  'uglo_15km'
   'glo_30m'
 $
   0
diff --git a/parm/wave/wc_10m_interp.inp.tmpl b/parm/wave/wc_10m_interp.inp.tmpl
index 8338c91d0c..234a9a1f93 100755
--- a/parm/wave/wc_10m_interp.inp.tmpl
+++ b/parm/wave/wc_10m_interp.inp.tmpl
@@ -5,7 +5,7 @@ $ Start Time DT NSteps
 $ Total number of grids
   2
 $ Grid extensions
-  'uglo_m1g16'
+  'uglo_15km'
   'wc_10m'
 $
   0
diff --git a/parm/wave/ww3_shel.inp.tmpl b/parm/wave/ww3_shel.inp.tmpl
deleted file mode 100644
index 0b9b335e1b..0000000000
--- a/parm/wave/ww3_shel.inp.tmpl
+++ /dev/null
@@ -1,42 +0,0 @@
-$ -------------------------------------------------------------------- $
-$ WAVEWATCH III shel input file                                         $
-$ -------------------------------------------------------------------- $
-$ Include ice and mud parameters only if IC1/2/3/4 used :
-   F F     Water levels
-   CURRLINE
-   WINDLINE
-   ICELINE
-   F F     Atmospheric momentum
-   F F     Air density
-   F       Assimilation data : Mean parameters
-   F       Assimilation data : 1-D spectra
-   F       Assimilation data : 2-D spectra
-$
-   RUN_BEG
-   RUN_END
-$
-$ IOSTYP
-   IOSRV
-$
-   OUT_BEG DTFLD OUT_END GOFILETYPE
-   N
-   OUTPARS
-$
-   OUT_BEG DTPNT OUT_END POFILETYPE
-BUOY_FILE
-$
-   OUT_BEG 0 OUT_END
-$
-$ Keep next two lines formatting as is to allow proper parsing
-   RST_BEG DTRST RST_END RSTTYPE
-RST_2_BEG DT_2_RST RST_2_END
-$
-   OUT_BEG 0 OUT_END
-$
-   OUT_BEG 0 OUT_END
-$
-   'the_end' 0
-$
-   'STP'
-$
-$ End of input file
diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh
index b70b9e894f..a09923fd98 100755
--- a/sorc/link_workflow.sh
+++ b/sorc/link_workflow.sh
@@ -178,19 +178,13 @@ declare -a ufs_templates=("model_configure.IN" "input_global_nest.nml.IN"
                           "MOM6_data_table.IN"
                           "ice_in.IN"
                           "ufs.configure.atm.IN"
-                          "ufs.configure.atm_esmf.IN"
                           "ufs.configure.atmaero.IN"
-                          "ufs.configure.atmaero_esmf.IN"
                           "ufs.configure.s2s.IN"
-                          "ufs.configure.s2s_esmf.IN"
                           "ufs.configure.s2sa.IN"
"ufs.configure.s2sa_esmf.IN" "ufs.configure.s2sw.IN" - "ufs.configure.s2sw_esmf.IN" "ufs.configure.s2swa.IN" - "ufs.configure.s2swa_esmf.IN" "ufs.configure.leapfrog_atm_wav.IN" - "ufs.configure.leapfrog_atm_wav_esmf.IN" + "ww3_shel.nml.IN" "post_itag_gfs") for file in "${ufs_templates[@]}"; do [[ -s "${file}" ]] && rm -f "${file}" diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd index 63ace62a36..76471dc6b7 160000 --- a/sorc/ufs_model.fd +++ b/sorc/ufs_model.fd @@ -1 +1 @@ -Subproject commit 63ace62a36a263f03b914a92fc5536509e862dbc +Subproject commit 76471dc6b7bfc3342416d1a3402f360724f7c0fa diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 25cd4d36f0..755f0f2a66 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -337,10 +337,9 @@ FV3_out() { # shellcheck disable=SC2034 WW3_postdet() { echo "SUB ${FUNCNAME[0]}: Linking input data for WW3" - local ww3_grid first_ww3_restart_out ww3_restart_file # Copy initial condition files: - local restart_date restart_dir + local restart_date restart_dir seconds if [[ "${RERUN}" == "YES" ]]; then restart_date="${RERUN_DATE}" restart_dir="${DATArestart}/WW3_RESTART" @@ -350,27 +349,45 @@ WW3_postdet() { fi echo "Copying WW3 restarts for 'RUN=${RUN}' at '${restart_date}' from '${restart_dir}'" - ww3_restart_file="${restart_dir}/${restart_date:0:8}.${restart_date:8:2}0000.restart.ww3" - if [[ -s "${ww3_restart_file}" ]]; then - ${NCP} "${ww3_restart_file}" "${DATA}/restart.ww3" \ - || ( echo "FATAL ERROR: Unable to copy WW3 IC, ABORT!"; exit 1 ) - first_ww3_restart_out=$(date --utc -d "${restart_date:0:8} ${restart_date:8:2} + ${restart_interval} hours" +%Y%m%d%H) + + #First check to see if netcdf restart exists: + local ww3_binary_restart_file ww3_netcdf_restart_file + ww3_binary_restart_file="${restart_dir}/${restart_date:0:8}.${restart_date:8:2}0000.restart.ww3" + ww3_netcdf_restart_file="${restart_dir}/${restart_date:0:8}.${restart_date:8:2}0000.restart.ww3.nc" + if [[ -s "${ww3_netcdf_restart_file}" ]]; then + export WW3_restart_from_binary=false + seconds=$(to_seconds "${restart_date:8:2}0000") # convert HHMMSS to seconds + local ww3_restart_dest_file="ufs.cpld.ww3.r.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" + ${NCP} "${ww3_netcdf_restart_file}" "${DATA}/${ww3_restart_dest_file}" \ + || ( echo "FATAL ERROR: Unable to copy netcdf WW3 IC, ABORT!"; exit 1 ) + elif [[ -s "${ww3_binary_restart_file}" ]]; then + # found binary ww3 restart file + export WW3_restart_from_binary=true + ${NCP} "${ww3_binary_restart_file}" "${DATA}/restart.ww3" \ + || ( echo "FATAL ERROR: Unable to copy binary WW3 IC, ABORT!"; exit 1 ) else if [[ "${RERUN}" == "YES" ]]; then # In the case of a RERUN, the WW3 restart file is required - echo "FATAL ERROR: WW3 restart file '${ww3_restart_file}' not found for RERUN='${RERUN}', ABORT!" + echo "FATAL ERROR: WW3 binary | netcdf restart file '${ww3_binary_restart_file}' | '${ww3_netcdf_restart_file}' not found for RERUN='${RERUN}', ABORT!" exit 1 else - echo "WARNING: WW3 restart file '${ww3_restart_file}' not found for warm_start='${warm_start}', will start from rest!" - first_ww3_restart_out=${model_start_date_current_cycle} + echo "WARNING: WW3 binary | netcdf restart file '${ww3_binary_restart_file}' | '${ww3_netcdf_restart_file}' not found for warm_start='${warm_start}', will start from rest!" 
+      export WW3_restart_from_binary=true
     fi
   fi

+  first_ww3_restart_out=$(date --utc -d "${restart_date:0:8} ${restart_date:8:2} + ${restart_interval} hours" +%Y%m%d%H)
+  if [[ "${DOIAU:-NO}" == "YES" ]]; then
+    first_ww3_restart_out=$(date --utc -d "${first_ww3_restart_out:0:8} ${first_ww3_restart_out:8:2} + ${half_window} hours" +%Y%m%d%H)
+  fi
+
   # Link restart files
   for (( vdate = first_ww3_restart_out; vdate <= forecast_end_cycle; vdate = $(date --utc -d "${vdate:0:8} ${vdate:8:2} + ${restart_interval} hours" +%Y%m%d%H) )); do
-    ww3_restart_file="${vdate:0:8}.${vdate:8:2}0000.restart.ww3"
-    ${NLN} "${DATArestart}/WW3_RESTART/${ww3_restart_file}" "${ww3_restart_file}"
+    seconds=$(to_seconds "${vdate:8:2}0000") # convert HHMMSS to seconds
+    ww3_restart_ufs_file="ufs.cpld.ww3.r.${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}.nc"
+    ww3_netcdf_restart_file="${vdate:0:8}.${vdate:8:2}0000.restart.ww3.nc"
+    ${NLN} "${DATArestart}/WW3_RESTART/${ww3_netcdf_restart_file}" "${ww3_restart_ufs_file}"
   done

   # Link output files
@@ -410,7 +427,41 @@ WW3_nml() {

 WW3_out() {
   echo "SUB ${FUNCNAME[0]}: Copying output data for WW3"
-  # TODO: Need to add logic to copy restarts from DATArestart/WW3_RESTART to COMOUT_WAVE_RESTART
+
+  # Copy wave namelist from DATA to COMOUT_CONF after the forecast is run (and successful)
+  ${NCP} "${DATA}/ww3_shel.nml" "${COMOUT_CONF}/ufs.ww3_shel.nml"
+
+  # Copy WW3 restarts at the end of the forecast segment to COM for RUN=gfs|gefs
+  if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then
+    local restart_file
+    if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
+      echo "Copying WW3 restarts for 'RUN=${RUN}' at ${forecast_end_cycle}"
+      restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.restart.ww3.nc"
+      ${NCP} "${DATArestart}/WW3_RESTART/${restart_file}" \
+        "${COMOUT_WAVE_RESTART}/${restart_file}"
+    fi
+  fi
+
+  # Copy restarts for next cycle for RUN=gdas|gefs
+  if [[ "${RUN}" == "gdas" || "${RUN}" == "gefs" ]]; then
+    local restart_date restart_file
+    restart_date="${model_start_date_next_cycle}"
+    echo "Copying WW3 restarts for 'RUN=${RUN}' at ${restart_date}"
+    restart_file="${restart_date:0:8}.${restart_date:8:2}0000.restart.ww3.nc"
+    ${NCP} "${DATArestart}/WW3_RESTART/${restart_file}" \
+      "${COMOUT_WAVE_RESTART}/${restart_file}"
+  fi
+
+  # Copy restarts for downstream usage in HAFS
+  if [[ "${RUN}" == "gdas" ]]; then
+    local restart_date restart_file
+    restart_date="${next_cycle}"
+    echo "Copying WW3 restarts for 'RUN=${RUN}' at ${restart_date}"
+    restart_file="${restart_date:0:8}.${restart_date:8:2}0000.restart.ww3.nc"
+    ${NCP} "${DATArestart}/WW3_RESTART/${restart_file}" \
+      "${COMOUT_WAVE_RESTART}/${restart_file}"
+  fi
+
 }
diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh
index 14f32378c3..99d1ec8fdc 100755
--- a/ush/forecast_predet.sh
+++ b/ush/forecast_predet.sh
@@ -695,6 +695,7 @@ MOM6_predet(){
 }

+# shellcheck disable=SC2178
 CMEPS_predet(){
   echo "SUB ${FUNCNAME[0]}: CMEPS before run type determination"

@@ -703,6 +704,29 @@ CMEPS_predet(){
   if [[ ! -d "${DATArestart}/CMEPS_RESTART" ]]; then mkdir -p "${DATArestart}/CMEPS_RESTART"; fi
   ${NLN} "${DATArestart}/CMEPS_RESTART" "${DATA}/CMEPS_RESTART"

+  # For CMEPS, CICE, MOM6 and WW3 determine restart writes
+  # Note FV3 has its own restart intervals
+  cmeps_restart_interval=${restart_interval:-${FHMAX}}
+  # restart_interval = 0 implies write restart at the END of the forecast i.e. at FHMAX
+  # Convert restart interval into an explicit list for FV3
+  if (( cmeps_restart_interval == 0 )); then
+    if [[ "${DOIAU:-NO}" == "YES" ]]; then
+      CMEPS_RESTART_FH=$(( FHMAX + half_window ))
+    else
+      CMEPS_RESTART_FH=("${FHMAX}")
+    fi
+  else
+    if [[ "${DOIAU:-NO}" == "YES" ]]; then
+      local restart_interval_start=$(( cmeps_restart_interval + half_window ))
+      local restart_interval_end=$(( FHMAX + half_window ))
+    else
+      local restart_interval_start=${cmeps_restart_interval}
+      local restart_interval_end=${FHMAX}
+    fi
+    CMEPS_RESTART_FH="$(seq -s ' ' "${restart_interval_start}" "${cmeps_restart_interval}" "${restart_interval_end}")"
+  fi
+  export CMEPS_RESTART_FH
+  # TODO: For GEFS, once cycling waves "self-cycles" and therefore needs to have a restart at 6 hour
 }

 # shellcheck disable=SC2034
diff --git a/ush/parsing_model_configure_FV3.sh b/ush/parsing_model_configure_FV3.sh
index 8033d7686a..d28048f098 100755
--- a/ush/parsing_model_configure_FV3.sh
+++ b/ush/parsing_model_configure_FV3.sh
@@ -25,6 +25,7 @@ local SHOUR=${model_start_date:8:2}
 local FHROT=${IAU_FHROT:-0}
 local DT_ATMOS=${DELTIM}
 local RESTART_INTERVAL="${FV3_RESTART_FH[*]}"
+local RESTART_FH="${CMEPS_RESTART_FH:-" "}"
 # QUILTING
 local QUILTING_RESTART="${QUILTING_RESTART:-${QUILTING}}"
 local WRITE_GROUP=${WRITE_GROUP:-1}
diff --git a/ush/parsing_namelists_WW3.sh b/ush/parsing_namelists_WW3.sh
index 5ee4944c18..67bffb1967 100755
--- a/ush/parsing_namelists_WW3.sh
+++ b/ush/parsing_namelists_WW3.sh
@@ -4,158 +4,61 @@ WW3_namelists(){

 # WW3 namelists/input generation

-  FHMAX_WAV=${FHMAX_WAV:-384}
-
-  # Date and time stuff
-
-  # Beginning time for outpupt may differ from SDATE if DOIAU=YES
-  export date=$PDY
-  export YMDH=${PDY}${cyc}
-  # Roll back $IAU_FHROT hours of DOIAU=YES
-  if [ "$DOIAU" = "YES" ]
-  then
-    WAVHINDH=$(( WAVHINDH + IAU_FHROT ))
-  fi
-  # Set time stamps for model start and output
-  # For special case when IAU is on but this is an initial half cycle
-  if [ ${IAU_OFFSET:-0} = 0 ]; then
-    ymdh_beg=$YMDH
-  else
-    ymdh_beg=$($NDATE -$WAVHINDH $YMDH)
-  fi
-  time_beg="$(echo $ymdh_beg | cut -c1-8) $(echo $ymdh_beg | cut -c9-10)0000"
-  ymdh_end=$($NDATE $FHMAX_WAV $YMDH)
-  time_end="$(echo $ymdh_end | cut -c1-8) $(echo $ymdh_end | cut -c9-10)0000"
-  ymdh_beg_out=$YMDH
-  time_beg_out="$(echo $ymdh_beg_out | cut -c1-8) $(echo $ymdh_beg_out | cut -c9-10)0000"
-
-  # Restart file times (already has IAU_FHROT in WAVHINDH)
-  RSTOFFSET=$(( ${WAVHCYC} - ${WAVHINDH} ))
-  # Update restart time is added offset relative to model start
-  RSTOFFSET=$(( ${RSTOFFSET} + ${RSTIOFF_WAV} ))
-  ymdh_rst_ini=$($NDATE ${RSTOFFSET} $YMDH)
-  RST2OFFSET=$(( DT_2_RST_WAV / 3600 ))
-  ymdh_rst2_ini=$($NDATE ${RST2OFFSET} $YMDH) # DT2 relative to first-first-cycle restart file
-  # First restart file for cycling
-  time_rst_ini="$(echo $ymdh_rst_ini | cut -c1-8) $(echo $ymdh_rst_ini | cut -c9-10)0000"
-  if [ ${DT_1_RST_WAV} = 1 ]; then
-    time_rst1_end=${time_rst_ini}
-  else
-    RST1OFFSET=$(( DT_1_RST_WAV / 3600 ))
-    ymdh_rst1_end=$($NDATE $RST1OFFSET $ymdh_rst_ini)
-    time_rst1_end="$(echo $ymdh_rst1_end | cut -c1-8) $(echo $ymdh_rst1_end | cut -c9-10)0000"
-  fi
-  # Second restart file for checkpointing
-  if [ "${RSTTYPE_WAV}" = "T" ]; then
-    time_rst2_ini="$(echo $ymdh_rst2_ini | cut -c1-8) $(echo $ymdh_rst2_ini | cut -c9-10)0000"
-    time_rst2_end=$time_end
-    # Condition for gdas run or any other run when checkpoint stamp is > ymdh_end
-    if [ $ymdh_rst2_ini -ge $ymdh_end ]; then
-      ymdh_rst2_ini=$($NDATE 3 $ymdh_end)
-      time_rst2_ini="$(echo $ymdh_rst2_ini | cut -c1-8) $(echo $ymdh_rst2_ini | cut -c9-10)0000"
-      time_rst2_end=$time_rst2_ini
-    fi
-  else
-    time_rst2_ini="$"
-    time_rst2_end=
-    DT_2_RST_WAV=
-  fi
-
-
-  set +x
-  echo ' '
-  echo 'Times in wave model format :'
-  echo '----------------------------'
-  echo "  date / cycle  : $date $cycle"
-  echo "  starting time : $time_beg"
-  echo "  ending time   : $time_end"
-  echo ' '
-  set_trace
-
-
+  FHMAX_WAV="${FHMAX_WAV:-384}"
 # --------------------------------------------------------------------------- #
-# Create ww3_shel.inp
-
-  if [ -f "${PARMgfs}/wave/ww3_shel.inp.tmpl" ]; then
-    cp "${PARMgfs}/wave/ww3_shel.inp.tmpl" "ww3_shel.inp.tmpl"
-  fi
-  if [ ! -f ww3_shel.inp.tmpl ]; then
-    echo "ABNORMAL EXIT: NO TEMPLATE FOR WW3 SHEL INPUT FILE"
-    exit 12
-  fi
-
  # Buoy location file
-  if [ -f ${PARMgfs}/wave/wave_${NET}.buoys ]
+  if [ -f "${PARMgfs}/wave/wave_${NET}.buoys" ]
   then
-    cp ${PARMgfs}/wave/wave_${NET}.buoys buoy.loc
+    ${NCP} "${PARMgfs}/wave/wave_${NET}.buoys" "${DATA}/ww3_points.list"
   fi
-  if [ -f buoy.loc ]
+  if [ -f "${DATA}/ww3_points.list" ]
   then
     set +x
-    echo " buoy.loc copied (${PARMgfs}/wave/wave_${NET}.buoys)."
+    echo "ww3_points.list copied (${PARMgfs}/wave/wave_${NET}.buoys)."
     set_trace
   else
-    echo " FATAL ERROR : buoy.loc (${PARMgfs}/wave/wave_${NET}.buoys) NOT FOUND"
+    echo "FATAL ERROR : ww3_points.list (${PARMgfs}/wave/wave_${NET}.buoys) NOT FOUND"
     exit 12
   fi

-# Initialize inp file parameters
-ICELINE='F F'
-CURRLINE='F F'
-WINDLINE='F F'
-
-case ${WW3ATMINP} in
-  'YES' )
-    WINDLINE="T F";;
-  'CPL' )
-    WINDLINE="C F";;
-esac
-
-case ${WW3ICEINP} in
-  'YES' )
-    ICELINE="T F";;
-  'CPL' )
-    ICELINE="C F";;
-esac
-
-case ${WW3CURINP} in
-  'YES' )
-    CURRLINE="T F";;
-  'CPL' )
-    CURRLINE="C F";;
-esac
-
-sed -e "s/IOSRV/${IOSRV}/g" \
-    -e "s/OUTPARS/${OUTPARS_WAV}/g" \
-    -e "s/ICELINE/$ICELINE/g" \
-    -e "s/CURRLINE/$CURRLINE/g" \
-    -e "s/WINDLINE/$WINDLINE/g" \
-    -e "s/RUN_BEG/$time_beg/g" \
-    -e "s/RUN_END/$time_end/g" \
-    -e "s/OUT_BEG/$time_beg_out/g" \
-    -e "s/OUT_END/$time_end/g" \
-    -e "s/DTFLD/ $DTFLD_WAV/g" \
-    -e "s/GOFILETYPE/ $GOFILETYPE/g" \
-    -e "s/POFILETYPE/ $POFILETYPE/g" \
-    -e "s/DTPNT/ $DTPNT_WAV/g" \
-    -e "s/DTPNT/ $DTPNT_WAV/g" \
-    -e "/BUOY_FILE/r buoy.loc" \
-    -e "s/BUOY_FILE/DUMMY/g" \
-    -e "s/RST_BEG/$time_rst_ini/g" \
-    -e "s/RSTTYPE/$RSTTYPE_WAV/g" \
-    -e "s/RST_2_BEG/$time_rst2_ini/g" \
-    -e "s/DTRST/$DT_1_RST_WAV/g" \
-    -e "s/DT_2_RST/$DT_2_RST_WAV/g" \
-    -e "s/RST_END/$time_rst1_end/g" \
-    -e "s/RST_2_END/$time_rst2_end/g" \
-    ww3_shel.inp.tmpl | \
-sed -n "/DUMMY/!p" > ww3_shel.inp
-
-rm -f ww3_shel.inp.tmpl buoy.loc
-
-cat ww3_shel.inp
+  #set coupling to ice/current
+  WW3_ICE="F"
+  WW3_CUR="F"
+
+  case ${WW3ICEINP} in
+    'YES' )
+      WW3_ICE="T";;
+    'CPL' )
+      WW3_ICE="C";;
+  esac
+
+  case ${WW3CURINP} in
+    'YES' )
+      WW3_CUR="T";;
+    'CPL' )
+      WW3_CUR="C";;
+  esac
+
+  # Variables used in atparse of shel template
+  export WW3_IC1="F"
+  export WW3_IC5="F"
+  export WW3_ICE
+  export WW3_CUR
+  export WW3_OUTPARS="${OUTPARS_WAV}"
+  export WW3_DTFLD="${DTFLD_WAV}"
+  export WW3_DTPNT="${DTPNT_WAV}"
+  # Ensure the template exists
+  local template=${WW3_INPUT_TEMPLATE:-"${PARMgfs}/ufs/ww3_shel.nml.IN"}
+  if [[ ! -f "${template}" ]]; then
+    echo "FATAL ERROR: template '${template}' does not exist, ABORT!"
+    exit 1
+  fi
+  rm -f "${DATA}/ww3_shel.nml"
+  atparse < "${template}" >> "${DATA}/ww3_shel.nml"
+  echo "Rendered ww3_shel.nml:"
+  cat "${DATA}/ww3_shel.nml"
 }
diff --git a/ush/parsing_ufs_configure.sh b/ush/parsing_ufs_configure.sh
index 7ee699ef0a..5ed26f8a45 100755
--- a/ush/parsing_ufs_configure.sh
+++ b/ush/parsing_ufs_configure.sh
@@ -53,7 +53,7 @@ if [[ "${cplflx}" = ".true." ]]; then
   local CMEPS_RESTART_DIR="CMEPS_RESTART/"
   local CPLMODE="${cplmode}"
   local coupling_interval_fast_sec="${CPL_FAST}"
-  local RESTART_N="${restart_interval}"
+  local RESTART_N=999999
   local ocean_albedo_limit=0.06
   local ATMTILESIZE="${CASE:1}"
   local ocean_albedo_limit=0.06
@@ -74,12 +74,10 @@ if [[ "${cplwav}" = ".true." ]]; then

   local wav_model="ww3"
   local wav_petlist_bounds="$(( ATMPETS+OCNPETS+ICEPETS )) $(( ATMPETS+OCNPETS+ICEPETS+WAVPETS-1 ))"
   local wav_omp_num_threads="${WAVTHREADS}"
-  local WW3_user_sets_restname="false"
   local WW3_user_histname="false"
   local WW3_historync="false"
-  local WW3_restartnc="false"
-  local WW3_restart_from_binary="false"
+  local WW3_restartnc="true"
   local WW3_PIO_FORMAT="pnetcdf"
   local WW3_PIO_IOTASKS=-99
   local WW3_PIO_STRIDE=4
@@ -97,6 +95,13 @@ if [[ "${cplchm}" = ".true." ]]; then

 fi

+#Set ESMF_THREADING variable for ufs configure
+if [[ "${USE_ESMF_THREADING}" = "YES" ]]; then
+  local ESMF_THREADING="true"
+else
+  local ESMF_THREADING="false"
+fi
+
 # Ensure the template exists
 if [[ ! -r "${ufs_configure_template}" ]]; then
   echo "FATAL ERROR: template '${ufs_configure_template}' does not exist, ABORT!"
diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py
index 4df03b9444..73287d668e 100644
--- a/workflow/applications/gfs_cycled.py
+++ b/workflow/applications/gfs_cycled.py
@@ -122,7 +122,7 @@ def _get_app_configs(self, run):
             configs += ['awips']

         if options['do_wave']:
-            configs += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostpnt']
+            configs += ['waveinit', 'wavepostsbs', 'wavepostpnt']
             if options['do_wave_bnd']:
                 configs += ['wavepostbndpnt', 'wavepostbndpntbll']
             if options['do_gempak']:
@@ -187,7 +187,7 @@ def get_task_names(self):
             if options['do_jedisnowda']:
                 task_names[run] += ['snowanl']

-            wave_prep_tasks = ['waveinit', 'waveprep']
+            wave_prep_tasks = ['waveinit']
             wave_bndpnt_tasks = ['wavepostbndpnt', 'wavepostbndpntbll']
             wave_post_tasks = ['wavepostsbs', 'wavepostpnt']

diff --git a/workflow/applications/gfs_forecast_only.py b/workflow/applications/gfs_forecast_only.py
index fffdab6ef9..43f1309b19 100644
--- a/workflow/applications/gfs_forecast_only.py
+++ b/workflow/applications/gfs_forecast_only.py
@@ -67,7 +67,7 @@ def _get_app_configs(self, run):
             configs += ['oceanice_products']

         if options['do_wave']:
-            configs += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostpnt']
+            configs += ['waveinit', 'wavepostsbs', 'wavepostpnt']
             if options['do_wave_bnd']:
                 configs += ['wavepostbndpnt', 'wavepostbndpntbll']
             if options['do_gempak']:
diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py
index d2a3e43719..9bc48becb3 100644
--- a/workflow/rocoto/gfs_tasks.py
+++ b/workflow/rocoto/gfs_tasks.py
@@ -931,6 +931,11 @@ def _fcst_cycled(self):
         dep = rocoto.add_dependency(dep_dict)
         dependencies = rocoto.create_dependency(dep=dep)

+        if self.options['do_wave']:
+            wave_job = 'waveprep' if self.options['app'] in ['ATMW'] else 'waveinit'
+            dep_dict = {'type': 'task', 'name': f'{self.run}_{wave_job}'}
+            dependencies.append(rocoto.add_dependency(dep_dict))
+
         if self.options['do_jediocnvar']:
             dep_dict = {'type': 'task', 'name': f'{self.run}_marineanlfinal'}
             dependencies.append(rocoto.add_dependency(dep_dict))
@@ -950,11 +955,6 @@
             dependencies.append(rocoto.add_dependency(dep_dict))
             dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies)

-        if self.options['do_wave']:
-            dep_dict = {'type': 'task', 'name': f'{self.run}_waveprep'}
-            dependencies.append(rocoto.add_dependency(dep_dict))
-            dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
-
         cycledef = 'gdas_half,gdas' if self.run in ['gdas'] else self.run

         if self.run in ['gfs']:
@@ -1212,7 +1212,7 @@ def wavepostsbs(self):
                      'maxtries': '&MAXTRIES;'
                      }

-        fhrs = self._get_forecast_hours('gfs', self._configs['wavepostsbs'], 'wave')
+        fhrs = self._get_forecast_hours(self.run, self._configs['wavepostsbs'], 'wave')
         fhr_metatask_dict = {'fhr': ' '.join([f"{fhr:03d}" for fhr in fhrs])}

         metatask_dict = {'task_name': f'{self.run}_wavepostsbs',
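
Reviewer note (not part of the patch): the WW3_postdet() and WW3_out() changes above link the netcdf restart written as YYYYMMDD.HH0000.restart.ww3.nc to a ufs.cpld.ww3.r.YYYY-MM-DD-SSSSS.nc name in ${DATA}. The short bash sketch below shows that mapping for one date. It is illustrative only; the inline to_seconds is a simplified stand-in for the workflow helper of the same name (assumed to print zero-padded seconds of day), and the sample date is arbitrary.

#!/usr/bin/env bash
# Illustrative sketch only -- mirrors the restart-name construction in WW3_postdet() above.

# Simplified stand-in for the workflow's to_seconds helper: HHMMSS -> zero-padded seconds of day.
to_seconds() {
  local hhmmss=${1:?}
  local hh=${hhmmss:0:2} mm=${hhmmss:2:2} ss=${hhmmss:4:2}
  printf '%05d\n' $(( 10#${hh} * 3600 + 10#${mm} * 60 + 10#${ss} ))
}

restart_date="2021032312"  # arbitrary YYYYMMDDHH example

# Name of the netcdf restart staged under COM/DATArestart
ww3_netcdf_restart_file="${restart_date:0:8}.${restart_date:8:2}0000.restart.ww3.nc"

# ufs.cpld.ww3.r.* name the diff links the restart to inside ${DATA}
seconds=$(to_seconds "${restart_date:8:2}0000")
ww3_restart_ufs_file="ufs.cpld.ww3.r.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc"

echo "${ww3_netcdf_restart_file} -> ${ww3_restart_ufs_file}"
# Prints: 20210323.120000.restart.ww3.nc -> ufs.cpld.ww3.r.2021-03-23-43200.nc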
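Similarly, CMEPS_predet() above expands CMEPS_RESTART_FH into an explicit list of forecast hours with seq. The sketch below reproduces that logic with example inputs so the resulting list is easy to check; the values of FHMAX, restart_interval, DOIAU and half_window are hypothetical, and the array/scalar mix in the patch is simplified to plain strings here.

#!/usr/bin/env bash
# Illustrative sketch of the CMEPS_RESTART_FH expansion in CMEPS_predet() above.
# Example inputs (hypothetical; normally provided by the workflow configs):
FHMAX=120
restart_interval=24
DOIAU="YES"
half_window=3

cmeps_restart_interval=${restart_interval:-${FHMAX}}
if (( cmeps_restart_interval == 0 )); then
  # restart_interval = 0 means a single restart at the end of the forecast (FHMAX)
  if [[ "${DOIAU:-NO}" == "YES" ]]; then
    CMEPS_RESTART_FH=$(( FHMAX + half_window ))
  else
    CMEPS_RESTART_FH="${FHMAX}"
  fi
else
  # Shift both the first write and the end point by half the IAU window when DOIAU=YES
  if [[ "${DOIAU:-NO}" == "YES" ]]; then
    restart_interval_start=$(( cmeps_restart_interval + half_window ))
    restart_interval_end=$(( FHMAX + half_window ))
  else
    restart_interval_start=${cmeps_restart_interval}
    restart_interval_end=${FHMAX}
  fi
  CMEPS_RESTART_FH="$(seq -s ' ' "${restart_interval_start}" "${cmeps_restart_interval}" "${restart_interval_end}")"
fi
echo "CMEPS_RESTART_FH=${CMEPS_RESTART_FH}"
# With the example values above this prints: CMEPS_RESTART_FH=27 51 75 99 123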