diff --git a/.gitignore b/.gitignore
index 1fac25c82..73d447314 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,3 +7,9 @@
*.exe
*.x
*.log
+*~
+*.db
+*.db.bak
+system.conf
+fix/
+rocoto/*.xml
diff --git a/jobs/JHAFS_ATM_PREP b/jobs/JHAFS_ATM_PREP
index 19b074078..1aa31ac98 100755
--- a/jobs/JHAFS_ATM_PREP
+++ b/jobs/JHAFS_ATM_PREP
@@ -62,8 +62,13 @@ mkdir -p $OUTDIR $DATA
cd $DATA
# Execute ex-script
-${HOMEhafs}/scripts/exhafs_atm_prep.sh
-export err=$?
+if [[ "${run_datm:-no}" == yes ]] ; then
+ ${HOMEhafs}/scripts/exhafs_datm_prep.sh
+ export err=$?
+else
+ ${HOMEhafs}/scripts/exhafs_atm_prep.sh
+ export err=$?
+fi
exit $err
export KEEPDATA=${KEEPDATA:-YES}
diff --git a/jobs/JHAFS_OCN_PREP b/jobs/JHAFS_OCN_PREP
index 9e1060929..6f6eb5af6 100755
--- a/jobs/JHAFS_OCN_PREP
+++ b/jobs/JHAFS_OCN_PREP
@@ -51,8 +51,13 @@ mkdir -p $DATA
cd $DATA
# Execute ex-script
-${HOMEhafs}/scripts/exhafs_ocn_prep.py
-export err=$?
+if [[ "${run_docn:-no}" == yes ]] ; then
+ ${HOMEhafs}/scripts/exhafs_docn_prep.sh
+ export err=$?
+else
+ ${HOMEhafs}/scripts/exhafs_ocn_prep.py
+ export err=$?
+fi
exit $err
export KEEPDATA=${KEEPDATA:-YES}
diff --git a/modulefiles/modulefile.hafs.hera b/modulefiles/modulefile.hafs.hera
index d1097fe63..a2d03f70b 100644
--- a/modulefiles/modulefile.hafs.hera
+++ b/modulefiles/modulefile.hafs.hera
@@ -69,3 +69,6 @@ module use -a /scratch1/NCEPDEV/nems/emc.nemspara/soft/modulefiles
module load rocoto/1.3.3
module load intelpython/3.6.8
+
+# For CDEPS data models:
+module load cdo/1.9.10
diff --git a/modulefiles/modulefile.hafs.jet b/modulefiles/modulefile.hafs.jet
index 097f9e876..f3f524046 100644
--- a/modulefiles/modulefile.hafs.jet
+++ b/modulefiles/modulefile.hafs.jet
@@ -65,3 +65,6 @@ module load nco/4.9.1
module load rocoto/1.3.3
module load intelpython/3.6.5
+
+# For CDEPS data models:
+module load cdo
diff --git a/modulefiles/modulefile.hafs.orion b/modulefiles/modulefile.hafs.orion
index b9b526850..49b0c2e01 100644
--- a/modulefiles/modulefile.hafs.orion
+++ b/modulefiles/modulefile.hafs.orion
@@ -65,3 +65,6 @@ module load nco/4.9.3
module load rocoto/1.3.3
module load intelpython3/2020
+
+# For CDEPS data models:
+module load cdo
diff --git a/modulefiles/modulefile.hafs.wcoss_dell_p3 b/modulefiles/modulefile.hafs.wcoss_dell_p3
index 183be309b..3ceb46949 100644
--- a/modulefiles/modulefile.hafs.wcoss_dell_p3
+++ b/modulefiles/modulefile.hafs.wcoss_dell_p3
@@ -73,3 +73,6 @@ setenv CMAKE_Platform wcoss_dell_p3
module use /usrx/local/dev/emc_rocoto/modulefiles
module load ruby/2.5.1
module load rocoto/1.3.0rc2
+
+# For CDEPS data models:
+module load cdo
diff --git a/parm/cdeps/datm_era5.streams b/parm/cdeps/datm_era5.streams
new file mode 100644
index 000000000..c1332f62f
--- /dev/null
+++ b/parm/cdeps/datm_era5.streams
@@ -0,0 +1,15 @@
+stream_info: ERA5_HOURLY01
+taxmode01: limit
+mapalgo01: redist
+tInterpAlgo01: linear
+readMode01: single
+dtlimit01: 1.5
+stream_offset01: 0
+yearFirst01: _yearFirst_
+yearLast01: _yearLast_
+yearAlign01: _yearFirst_
+stream_vectors01: "u:v"
+stream_mesh_file01: _mesh_atm_
+stream_lev_dimname01: null
+stream_data_files01:
+stream_data_variables01: "u10 Sa_u10m" "v10 Sa_v10m" "t2m Sa_t2m" "skt Sa_tskn" "d2m Sa_tdew" "msl Sa_pslv" "tp Faxa_rain" "cp Faxa_rainc" "lsp Faxa_rainl" "csf Faxa_snowc" "lsf Faxa_snowl" "ssrd Faxa_swdn" "ssr Faxa_swnet" "strd Faxa_lwdn" "str Faxa_lwnet" "aluvp Faxa_swvdr" "aluvd Faxa_swvdf" "alnip Faxa_swndr" "alnid Faxa_swndf" "sshf Faxa_sen" "slhf Faxa_lat" "ewss Faxa_taux" "nsss Faxa_tauy"
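Note: the empty stream_data_files01: line above is intentional; exhafs_forecast.sh fills it at run time by appending one quoted path for each DATM input file linked into INPUT/. After substitution it ends up looking roughly like this (file names are hypothetical):

    stream_data_files01: "INPUT/DATM_input_00000.nc" "INPUT/DATM_input_00001.nc" "INPUT/DATM_input_00002.nc"

The _yearFirst_, _yearLast_, and _mesh_atm_ placeholders are likewise replaced with the cycle's first year, last year, and INPUT/DATM_ESMF_mesh.nc.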
diff --git a/parm/cdeps/datm_in b/parm/cdeps/datm_in
new file mode 100755
index 000000000..62b1ab0cf
--- /dev/null
+++ b/parm/cdeps/datm_in
@@ -0,0 +1,14 @@
+&datm_nml
+ datamode = "ERA5"
+ factorfn_data = "null"
+ factorfn_mesh = "null"
+ flds_co2 = .false.
+ flds_presaero = .false.
+ flds_wiso = .false.
+ iradsw = 1
+ model_maskfile = "_mesh_atm_"
+ model_meshfile = "_mesh_atm_"
+ nx_global = 1440
+ ny_global = 721
+ restfilm = "null"
+/
diff --git a/parm/cdeps/docn_ghrsst.streams b/parm/cdeps/docn_ghrsst.streams
new file mode 100644
index 000000000..dbbf450c4
--- /dev/null
+++ b/parm/cdeps/docn_ghrsst.streams
@@ -0,0 +1,15 @@
+stream_info: PRESCRIBED01
+taxmode01: limit
+mapalgo01: redist
+tInterpAlgo01: linear
+readMode01: single
+dtlimit01: 1.5
+stream_offset01: 0
+yearFirst01: _yearFirst_
+yearLast01: _yearLast_
+yearAlign01: _yearFirst_
+stream_vectors01: "null"
+stream_mesh_file01: "INPUT/DOCN_ESMF_mesh.nc"
+stream_lev_dimname01: null
+stream_data_files01:
+stream_data_variables01: "analysed_sst So_t"
diff --git a/parm/cdeps/docn_in b/parm/cdeps/docn_in
new file mode 100644
index 000000000..d863d076e
--- /dev/null
+++ b/parm/cdeps/docn_in
@@ -0,0 +1,9 @@
+&docn_nml
+ datamode = "sstdata"
+ model_maskfile = "_mesh_ocn_"
+ model_meshfile = "_mesh_ocn_"
+ nx_global = _nx_global_
+ ny_global = _ny_global_
+ restfilm = "null"
+ sst_constant_value = -1.0
+/
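For reference, with the OISST dimensions configured further below (docn_mesh_nx_global=1440, docn_mesh_ny_global=720), the forecast job's sed substitutions turn this template into approximately:

    &docn_nml
      datamode = "sstdata"
      model_maskfile = "INPUT/DOCN_ESMF_mesh.nc"
      model_meshfile = "INPUT/DOCN_ESMF_mesh.nc"
      nx_global = 1440
      ny_global = 720
      restfilm = "null"
      sst_constant_value = -1.0
    /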
diff --git a/parm/cdeps/docn_oisst.streams b/parm/cdeps/docn_oisst.streams
new file mode 100644
index 000000000..f72a86858
--- /dev/null
+++ b/parm/cdeps/docn_oisst.streams
@@ -0,0 +1,15 @@
+stream_info: PRESCRIBED01
+taxmode01: limit
+mapalgo01: redist
+tInterpAlgo01: linear
+readMode01: single
+dtlimit01: 1.5
+stream_offset01: 0
+yearFirst01: _yearFirst_
+yearLast01: _yearLast_
+yearAlign01: _yearFirst_
+stream_vectors01: "null"
+stream_mesh_file01: _mesh_ocn_
+stream_lev_dimname01: null
+stream_data_files01:
+stream_data_variables01: "sst So_t"
diff --git a/parm/forecast/globnest/model_configure.tmp b/parm/forecast/globnest/model_configure.tmp
index bf35fa39f..52eb1c465 100644
--- a/parm/forecast/globnest/model_configure.tmp
+++ b/parm/forecast/globnest/model_configure.tmp
@@ -1,4 +1,4 @@
-print_esmf: .false.
+print_esmf: _print_esmf_
start_year: YR
start_month: MN
start_day: DY
diff --git a/parm/forecast/globnest_hwrf/model_configure.tmp b/parm/forecast/globnest_hwrf/model_configure.tmp
index bf35fa39f..52eb1c465 100644
--- a/parm/forecast/globnest_hwrf/model_configure.tmp
+++ b/parm/forecast/globnest_hwrf/model_configure.tmp
@@ -1,4 +1,4 @@
-print_esmf: .false.
+print_esmf: _print_esmf_
start_year: YR
start_month: MN
start_day: DY
diff --git a/parm/forecast/regional/model_configure.tmp b/parm/forecast/regional/model_configure.tmp
index bf35fa39f..52eb1c465 100644
--- a/parm/forecast/regional/model_configure.tmp
+++ b/parm/forecast/regional/model_configure.tmp
@@ -1,4 +1,4 @@
-print_esmf: .false.
+print_esmf: _print_esmf_
start_year: YR
start_month: MN
start_day: DY
diff --git a/parm/forecast/regional/nems.configure.cdeps.tmp b/parm/forecast/regional/nems.configure.cdeps.tmp
new file mode 100644
index 000000000..ecb7b77e2
--- /dev/null
+++ b/parm/forecast/regional/nems.configure.cdeps.tmp
@@ -0,0 +1,150 @@
+##############################################
+##### NEMS Run-Time Configuration File #####
+##############################################
+
+# EARTH #
+EARTH_component_list: MED ATM OCN
+EARTH_attributes::
+ Verbosity = 0
+::
+
+# MED #
+MED_model: cmeps
+_MED_petlist_bounds_
+MED_attributes::
+ Verbosity = 1
+ Diagnostic = 0
+ ATM_model = _atm_model_
+ OCN_model = _ocn_model_
+ MED_model = cmeps
+ history_n = 1
+ history_option = ndays
+ history_ymd = -999
+ coupling_mode = hafs
+ normalization = none
+ merge_type = copy
+::
+
+# ATM #
+ATM_model: _atm_model_
+_ATM_petlist_bounds_
+ATM_attributes::
+ Verbosity = 1
+ Diagnostic = 0
+ mesh_atm = _mesh_atm_
+::
+
+# OCN #
+OCN_model: _ocn_model_
+_OCN_petlist_bounds_
+OCN_attributes::
+ Verbosity = 1
+ Diagnostic = 0
+ mesh_ocn = _mesh_ocn_
+# The following are only used by the hycom ocean model. #
+ cdf_impexp_freq = 3
+ cpl_hour = 0
+ cpl_min = 0
+ cpl_sec = _cpl_dt_
+ base_dtg = _base_dtg_
+ merge_import = _merge_import_
+ skip_first_import = .true.
+ hycom_arche_output = .false.
+ hyc_esmf_exp_output = .true.
+ hyc_esmf_imp_output = .true.
+ import_diagnostics = .false.
+ import_setting = flexible
+ hyc_impexp_file = nems.configure
+ espc_show_impexp_minmax = .true.
+ ocean_start_dtg = _ocean_start_dtg_
+ start_hour = 0
+ start_min = 0
+ start_sec = 0
+ end_hour = _end_hour_
+ end_min = 0
+ end_sec = 0
+::
+
+# Run Sequence #
+runSeq::
+@_cpl_dt_
+ ATM -> MED :remapMethod=redist
+ MED med_phases_post_atm
+ OCN -> MED :remapMethod=redist
+ MED med_phases_post_ocn
+ MED med_phases_prep_atm
+ MED med_phases_prep_ocn_accum
+ MED med_phases_prep_ocn_avg
+ MED -> ATM :remapMethod=redist
+ MED -> OCN :remapMethod=redist
+ ATM
+ OCN
+ MED med_phases_restart_write
+ MED med_phases_history_write
+@
+::
+
+# Other Attributes #
+DRIVER_attributes::
+ start_type = startup
+::
+
+ALLCOMP_attributes::
+ ATM_model = _atm_model_
+ OCN_model = _ocn_model_
+ MED_model = cmeps
+ ScalarFieldCount = 3
+ ScalarFieldIdxGridNX = 1
+ ScalarFieldIdxGridNY = 2
+ ScalarFieldIdxNextSwCday = 3
+ ScalarFieldName = cpl_scalars
+ start_type = startup
+ case_name = ufs.hafs
+ restart_n = 1
+ restart_option = ndays
+ restart_ymd = -999
+ dbug_flag = 20
+ use_coldstart = true
+ orb_eccen = 1.e36
+ orb_iyear = 2000
+ orb_iyear_align = 2000
+ orb_mode = fixed_year
+ orb_mvelp = 1.e36
+ orb_obliq = 1.e36
+ mediator_read_restart = false
+ mediator_present = true
+::
+
+ATM_modelio::
+ diro = .
+ logfile = atm.log
+::
+
+OCN_modelio::
+ diro = .
+ logfile = ocn.log
+::
+
+MED_modelio::
+ diro = .
+ logfile = med.log
+::
+
+# The following are only used by the hycom ocean model. #
+ocn_export_fields::
+ 'sst' 'sea_surface_temperature' 'K'
+ 'mask' 'ocean_mask' '1'
+ 'cpl_scalars' 'cpl_scalars' '1'
+::
+
+# The following are only used by the hycom ocean model. #
+ocn_import_fields::
+ 'taux10' 'mean_zonal_moment_flx_atm' 'N_m-2'
+ 'tauy10' 'mean_merid_moment_flx_atm' 'N_m-2'
+ 'prcp' 'mean_prec_rate' 'kg_m-2_s-1'
+ 'swflxd' 'mean_net_sw_flx' 'W_m-2'
+ 'lwflxd' 'mean_net_lw_flx' 'W_m-2'
+ 'mslprs' 'inst_pres_height_surface' 'Pa'
+ 'sensflx' 'mean_sensi_heat_flx' 'W_m-2'
+ 'latflx' 'mean_laten_heat_flx' 'W_m-2'
+::
diff --git a/parm/forecast/regional_hwrf/model_configure.tmp b/parm/forecast/regional_hwrf/model_configure.tmp
index bf35fa39f..52eb1c465 100644
--- a/parm/forecast/regional_hwrf/model_configure.tmp
+++ b/parm/forecast/regional_hwrf/model_configure.tmp
@@ -1,4 +1,4 @@
-print_esmf: .false.
+print_esmf: _print_esmf_
start_year: YR
start_month: MN
start_day: DY
diff --git a/parm/hafs.conf b/parm/hafs.conf
index d8c2d17a0..bd06376ba 100644
--- a/parm/hafs.conf
+++ b/parm/hafs.conf
@@ -27,6 +27,9 @@ ENS=99 ;; The ensemble number (placeholder)
# Specifies a section (default: [hafsdata]) to use: hafsdata, wcoss_fcst_nco
input_catalog=fcst_{GFSVER}
+docn_source=NONE ;; Data source for data ocean model (GHRSST, OISST, or NONE)
+datm_source=NONE ;; Data source for data atmosphere model (ERA5 or NONE)
+
## Configure file and directory paths
[dir]
HOMEhafs={CDSAVE}/{EXPT}
@@ -57,9 +60,20 @@ gsistatus2=gsi_status.{vit[stormname]}{vit[stnum]:02d}{vit[basin1lc]}.{cycle}
PARMforecast={PARMhafs}/forecast/regional ;; The location where the forecast job will find its parm and namelist files
PARMgsi={PARMhafs}/hafs-gsi/ ;; GSI input data for everything except CRTM
FIXcrtm={FIXhafs}/hafs-crtm-2.2.3/ ;; GSI CRTM input data
+FIXcdeps={FIXhafs}/fix_cdeps ;; CDEPS fix files
+FIXmeshes={FIXcdeps}/meshes ;; premade CDEPS meshes
utilexec={HOMEhafs}/exec ;; utility exe location (placeholder)
+# Data model locations
+DOCNdir=/work/noaa/{disk_project}/{ENV[USER]}/DOCN
+DATMdir=/work/noaa/{disk_project}/{ENV[USER]}/DATM
+
+# Processed input files and meshes during workflow execution:
+docn_input_path={intercom}/cdeps
+datm_input_path={intercom}/cdeps
+
+
## Executable program locations
# Currently not used in the workflow script system
[exe]
@@ -226,6 +240,8 @@ npx=2561
npy=2161
npz=64
+print_esmf=.false. ;; .true. to generate ESMF log files or .false. not to
+
# The write_grid_component related options
quilting=.true.
write_groups=2
@@ -272,6 +288,15 @@ ocean_tasks=60 ;; Number of PEs for the OCN component
ocean_start_dtg=auto ;; epoch day since hycom_epoch=datetime.datetime(1900,12,31,0,0,0), e.g., 43340.00000
merge_import=.true.
+# Data model defaults
+mesh_ocn_in=missing ;; premade mesh to use if make_mesh_ocn=no
+mesh_ocn_gen={WORKhafs}/intercom/cdeps/DOCN_ESMF_mesh.nc ;; do not change
+mesh_atm_in=missing ;; premade mesh to use if make_mesh_atm=no
+mesh_atm_gen={WORKhafs}/intercom/cdeps/DATM_ESMF_mesh.nc ;; do not change
+docn_mesh_nx_global=1440 ;; Dimensions of data ocean model in X direction
+docn_mesh_ny_global=720 ;; Dimensions of data ocean model in Y direction
+
+
[forecast_ens]
# ccpp suites
ccpp_suite_regional_ens={forecast/ccpp_suite_regional}
@@ -368,6 +393,7 @@ COMgfs={dir/COMgfs} ;; input GFS com directory
COMrtofs={dir/COMrtofs} ;; input RTOFS com directory
gtype={grid/gtype} ;; grid type: uniform, stretch, nest, or regional (currently only nest and regional have been tested and supported)
GFSVER={config/GFSVER} ;; Version of GFS input data, e.g., PROD2019, PROD2021
+
# Specify the forecast job resources. Only a few combinations are provided. If
# needed, you may add other options in the site entity files under rocoto/sites.
#FORECAST_RESOURCES=FORECAST_RESOURCES_regional_{forecast/layoutx}x{forecast/layouty}io{forecast/write_groups}x{forecast/write_tasks_per_group}_omp2
@@ -385,9 +411,13 @@ RUN_ENSDA={run_ensda} ;; Do we run the ensda system?
RUN_ENKF={run_enkf} ;; Do we run the self-cycled ensda system with EnKF analysis
RUN_OCEAN={run_ocean} ;; Do we run with ocean coupling?
RUN_WAVE={run_wave} ;; Do we run with wave coupling?
+RUN_DATM={run_datm} ;; Do we run with a data atmosphere using CDEPS?
+RUN_DOCN={run_docn} ;; Do we run with a data ocean using CDEPS?
+RUN_DWAV={run_dwav} ;; Do we run with data waves using CDEPS?
RUN_VORTEXINIT={run_vortexinit} ;; Do we enable vortex initialization?
RUN_HRDGRAPHICS={run_hrdgraphics} ;; Do we run HRD graphics?
RUN_EMCGRAPHICS={run_emcgraphics} ;; Do we run EMC graphics?
SCRUB_COM={scrub_com} ;; Should Rocoto scrub the COM directory?
SCRUB_WORK={scrub_work} ;; Should Rocoto scrub the WORK directory?
-
+MAKE_MESH_ATM={make_mesh_atm} ;; Should the DATM mesh be generated by the workflow?
+MAKE_MESH_OCN={make_mesh_ocn} ;; Should the DOCN mesh be generated by the workflow?
diff --git a/parm/hafs_basic.conf b/parm/hafs_basic.conf
index 2b12b3d0c..b245aa370 100644
--- a/parm/hafs_basic.conf
+++ b/parm/hafs_basic.conf
@@ -47,8 +47,13 @@ run_enkf=no ;; Run self-cycled ensemble data assimilation system with E
run_wave=no ;; Wave coupling (placeholder)
run_ocean=no ;; Ocean coupling
ocean_model=hycom
+run_datm=no ;; Data atmosphere using CDEPS
+run_docn=no ;; Data ocean using CDEPS
+run_dwav=no ;; Data waves using CDEPS
run_hrdgraphics=no ;; Run HRD graphics
run_emcgraphics=no ;; Run EMC graphics
+make_mesh_atm=no ;; Generate DATM mesh in workflow (only for run_datm=yes)
+make_mesh_ocn=no ;; Generate DOCN mesh in workflow (only for run_docn=yes)
# warm_start_opt: 0, coldstart from chgres; 1, warmstart from init; 2,
# warmstart from prior cycle's restart files; 3, warmstart from vortex
diff --git a/parm/hafs_datm.conf b/parm/hafs_datm.conf
new file mode 100644
index 000000000..b5b20f7c8
--- /dev/null
+++ b/parm/hafs_datm.conf
@@ -0,0 +1,32 @@
+[config]
+run_datm=yes ;; Data atmosphere using CDEPS
+run_docn=no ;; Data ocean using CDEPS
+run_ocean=yes ;; Whether to run the ocean model.
+run_dwav=no ;; Data waves using CDEPS. Not implemented.
+make_mesh_atm=no ;; yes=generate mesh_atm_gen; no=copy from FIXmeshes
+
+scrub_com=no ;; the archive job is not set up to handle files generated from datm or docn
+scrub_work=no
+
+; A second conf file sets this: datm_source=ERA5
+
+;; Make sure the atmospheric initialization system is disabled
+run_vortexinit=no ;; vortex initialization
+run_gsi_vr=no ;; GSI based vortex relocation
+run_gsi_vr_fgat=no ;; GSI based vortex relocation for FGAT
+run_gsi_vr_ens=no ;; GSI based vortex relocation for ensda members
+run_gsi=no ;; GSI and FGAT initialization
+run_fgat=no ;; Enable FGAT in DA
+run_envar=no ;; Run GSI with hybrid EnVar with either GDAS ensembles or regional ensembles
+run_ensda=no ;; Run ensemble data assimilation system
+run_enkf=no ;; Run self-cycled ensemble data assimilation system with EnKF analysis
+
+[forecast]
+layoutx=10
+layouty=8
+write_groups=1
+write_tasks_per_group=40
+ocean_tasks=120
+
+[rocotostr]
+FORECAST_RESOURCES=FORECAST_RESOURCES_regional_{forecast/layoutx}x{forecast/layouty}io{forecast/write_groups}x{forecast/write_tasks_per_group}_ocn{forecast/ocean_tasks}_omp1
diff --git a/parm/hafs_datm_era5.conf b/parm/hafs_datm_era5.conf
new file mode 100644
index 000000000..28f71bc1b
--- /dev/null
+++ b/parm/hafs_datm_era5.conf
@@ -0,0 +1,14 @@
+[config]
+datm_source=ERA5
+
+[forecast]
+layoutx=10
+layouty=8
+write_groups=1
+write_tasks_per_group=40
+ocean_tasks=120
+mesh_atm_in={FIXmeshes}/datm_era5_mesh.nc
+
+[rocotostr]
+FORECAST_RESOURCES=FORECAST_RESOURCES_regional_{forecast/layoutx}x{forecast/layouty}io{forecast/write_groups}x{forecast/write_tasks_per_group}_ocn{forecast/ocean_tasks}_omp1
+
diff --git a/parm/hafs_docn.conf b/parm/hafs_docn.conf
new file mode 100644
index 000000000..63f68c7f0
--- /dev/null
+++ b/parm/hafs_docn.conf
@@ -0,0 +1,21 @@
+[config]
+run_datm=no ;; Data atmosphere using CDEPS
+run_docn=yes ;; Data ocean using CDEPS
+run_ocean=no ;; Whether to run the ocean model. Must be no if run_docn=yes.
+run_dwav=no ;; Data waves using CDEPS. Not implemented.
+make_mesh_ocn=no ;; yes=generate mesh_ocn_gen; no=copy from FIXmeshes
+
+# A second file sets this option: docn_source=OISST ;; OISST, RTOFS, or GHRSST
+
+scrub_com=no ;; the archive job is not set up to handle files generated from datm or docn
+scrub_work=no
+
+[forecast]
+ocean_tasks=60
+docn_mesh_nx_global=1440 ;; Dimensions of data ocean model in X direction
+docn_mesh_ny_global=720 ;; Dimensions of data ocean model in Y direction
+
+[rocotostr]
+
+# DOCN
+FORECAST_RESOURCES=FORECAST_RESOURCES_regional_{forecast/layoutx}x{forecast/layouty}io{forecast/write_groups}x{forecast/write_tasks_per_group}_ocn{forecast/ocean_tasks}_omp2
diff --git a/parm/hafs_docn_ghrsst.conf b/parm/hafs_docn_ghrsst.conf
new file mode 100644
index 000000000..7a879b561
--- /dev/null
+++ b/parm/hafs_docn_ghrsst.conf
@@ -0,0 +1,8 @@
+[config]
+docn_source=GHRSST
+
+[forecast]
+docn_mesh_nx_global=11301
+docn_mesh_ny_global=7501
+ocean_tasks=80
+mesh_ocn_in={FIXmeshes}/docn_ghrsst_mesh.nc ;; premade mesh to use if make_mesh_ocn=no
diff --git a/parm/hafs_docn_oisst.conf b/parm/hafs_docn_oisst.conf
new file mode 100644
index 000000000..3505ca4dc
--- /dev/null
+++ b/parm/hafs_docn_oisst.conf
@@ -0,0 +1,7 @@
+[config]
+docn_source=OISST
+
+[forecast]
+docn_mesh_nx_global=1440
+docn_mesh_ny_global=720
+mesh_ocn_in={FIXmeshes}/docn_oisst_mesh.nc ;; premade mesh to use if make_mesh_ocn=no
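If a premade OISST mesh is not available under FIXmeshes, the workflow can generate one instead. A hypothetical invocation, following the pattern in rocoto/cronjob_hafs_cdeps.sh, simply adds config.make_mesh_ocn=yes on the command line:

    ${PYTHON3} ./run_hafs.py -t ${dev} 2019082900 00L HISTORY config.EXPT=${EXPT} \
        config.SUBEXPT=${EXPT}_oisst config.make_mesh_ocn=yes \
        forecast.output_history=.true. \
        ../parm/hafs_regional_static.conf \
        ../parm/hafs_docn.conf ../parm/hafs_docn_oisst.conf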
diff --git a/parm/hafs_holdvars.conf b/parm/hafs_holdvars.conf
index 7b3d9447b..a22970b49 100644
--- a/parm/hafs_holdvars.conf
+++ b/parm/hafs_holdvars.conf
@@ -33,6 +33,11 @@ ENVEQUIV={WHERE_AM_I} ;; Present cluster name
# SUBEXPT={ENV[SUBEXPT]}
#JET_NAME={ENV[JET_NAME]}
#WHERE_AM_I={ENV[ENVEQUIV]}
+DOCN_SOURCE={docn_source}
+DATM_SOURCE={datm_source}
+
+docn_mesh_nx_global={forecast/docn_mesh_nx_global}
+docn_mesh_ny_global={forecast/docn_mesh_ny_global}
CASE={grid/CASE}
LEVS={grid/LEVS}
@@ -108,6 +113,8 @@ write_tasks_per_group={forecast/write_tasks_per_group}
write_dopost={forecast/write_dopost}
output_history={forecast/output_history}
+print_esmf={forecast/print_esmf}
+
glob_k_split={forecast/glob_k_split}
glob_n_split={forecast/glob_n_split}
glob_layoutx={forecast/glob_layoutx}
@@ -185,3 +192,8 @@ cpl_ocean={forecast/cpl_ocean}
ocean_tasks={forecast/ocean_tasks}
ocean_start_dtg={forecast/ocean_start_dtg}
merge_import={forecast/merge_import}
+
+mesh_atm={forecast/mesh_atm}
+mesh_ocn={forecast/mesh_ocn}
+docn_input_path={dir/docn_input_path}
+datm_input_path={dir/datm_input_path}
diff --git a/parm/hafs_holdvars.txt b/parm/hafs_holdvars.txt
index 0a4790db9..1efdd0fd7 100644
--- a/parm/hafs_holdvars.txt
+++ b/parm/hafs_holdvars.txt
@@ -3,6 +3,8 @@
# can also be used for debugging: simply source the
# storm*.holdvars.txt in a ksh/sh/bash shell.
+export TZ=UTC # Orion workaround
+
export envir={ENV[envir|-prod]}
export storm_num={storm_num}
@@ -40,6 +42,24 @@ export COMhafs={COMhafs}
export COMIN={COMIN}
export COMOUT={COMOUT}
export COMgfs={COMgfs}
+export DATMdir={DATMdir}
+export DOCNdir={DOCNdir}
+
+export run_datm={run_datm}
+export run_docn={run_docn}
+export run_dwav={run_dwav}
+
+export make_mesh_atm={make_mesh_atm}
+export mesh_atm={mesh_atm}
+
+export docn_mesh_nx_global={docn_mesh_nx_global}
+export docn_mesh_ny_global={docn_mesh_ny_global}
+export make_mesh_ocn={make_mesh_ocn}
+export mesh_ocn={mesh_ocn}
+export docn_input_path={docn_input_path}
+export datm_input_path={datm_input_path}
+export DOCN_SOURCE={DOCN_SOURCE}
+export DATM_SOURCE={DATM_SOURCE}
export SYNDAThafs={syndat}
export ADECKhafs={ADECKhafs}
@@ -99,6 +119,8 @@ export write_tasks_per_group={write_tasks_per_group}
export write_dopost={write_dopost}
export output_history={output_history}
+export print_esmf={print_esmf}
+
export glob_k_split={glob_k_split}
export glob_n_split={glob_n_split}
export glob_layoutx={glob_layoutx}
diff --git a/parm/system.conf.hera b/parm/system.conf.hera
index e388d25b4..1c923ea6a 100644
--- a/parm/system.conf.hera
+++ b/parm/system.conf.hera
@@ -22,6 +22,8 @@ inputroot=/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFSv16
[dir]
## Non-scrubbed directory for track files, etc. Make sure you edit this.
CDNOSCRUB=/scratch1/NCEPDEV/{disk_project}/noscrub/{ENV[USER]}/hafstrak
+DOCNdir=/scratch1/NCEPDEV/{disk_project}/noscrub/{ENV[USER]}/DOCN
+DATMdir=/scratch1/NCEPDEV/{disk_project}/noscrub/{ENV[USER]}/DATM
## Save directory. Make sure you edit this.
CDSAVE=/scratch1/NCEPDEV/{disk_project}/save/{ENV[USER]}
## Scrubbed directory for large work files. Make sure you edit this.
diff --git a/parm/system.conf.jet b/parm/system.conf.jet
index 4b0cef88e..8601297db 100644
--- a/parm/system.conf.jet
+++ b/parm/system.conf.jet
@@ -23,6 +23,8 @@ inputroot=/lfs4/HFIP/hwrf-data/hafs-input/COMGFSv16
[dir]
## Non-scrubbed directory for track files, etc. Make sure you edit this.
CDNOSCRUB=/lfs4/HFIP/{disk_project}/{ENV[USER]}/noscrub/hafstrak
+DATMdir=/lfs4/HFIP/{disk_project}/{ENV[USER]}/noscrub/DATM
+DOCNdir=/lfs4/HFIP/{disk_project}/{ENV[USER]}/noscrub/DOCN
## Scrubbed directory for large work files. Make sure you edit this.
CDSCRUB=/lfs4/HFIP/{disk_project}/{ENV[USER]}/hafstmp
## Save directory. Make sure you edit this.
diff --git a/parm/system.conf.kjet b/parm/system.conf.kjet
index a117a0c8a..2b4dd7d54 100644
--- a/parm/system.conf.kjet
+++ b/parm/system.conf.kjet
@@ -23,6 +23,8 @@ inputroot=/lfs4/HFIP/hwrf-data/hafs-input/COMGFSv16
[dir]
## Non-scrubbed directory for track files, etc. Make sure you edit this.
CDNOSCRUB=/lfs4/HFIP/{disk_project}/{ENV[USER]}/noscrub/hafstrak
+DATMdir=/lfs4/HFIP/{disk_project}/{ENV[USER]}/noscrub/DATM
+DOCNdir=/lfs4/HFIP/{disk_project}/{ENV[USER]}/noscrub/DOCN
## Scrubbed directory for large work files. Make sure you edit this.
CDSCRUB=/lfs4/HFIP/{disk_project}/{ENV[USER]}/hafstmp
## Save directory. Make sure you edit this.
diff --git a/parm/system.conf.orion b/parm/system.conf.orion
index 3f419c5db..702fc9550 100644
--- a/parm/system.conf.orion
+++ b/parm/system.conf.orion
@@ -22,6 +22,8 @@ inputroot=/work/noaa/hwrf/noscrub/hafs-input/COMGFSv16
[dir]
## Non-scrubbed directory for track files, etc. Make sure you edit this.
CDNOSCRUB=/work/noaa/{disk_project}/noscrub/{ENV[USER]}/hafstrak
+DATMdir=/work/noaa/{disk_project}/noscrub/{ENV[USER]}/DATM
+DOCNdir=/work/noaa/{disk_project}/noscrub/{ENV[USER]}/DOCN
## Save directory. Make sure you edit this.
CDSAVE=/work/noaa/{disk_project}/save/{ENV[USER]}
## Scrubbed directory for large work files. Make sure you edit this.
diff --git a/parm/system.conf.wcoss_cray b/parm/system.conf.wcoss_cray
index 3e419b26c..8c502befe 100644
--- a/parm/system.conf.wcoss_cray
+++ b/parm/system.conf.wcoss_cray
@@ -23,6 +23,8 @@ inputroot=/gpfs/dell1/nco/ops/com/gfs/prod
[dir]
## Non-scrubbed directory for track files, etc. Make sure you edit this.
CDNOSCRUB=/gpfs/hps3/emc/{disk_project}/noscrub/{ENV[USER]}/hafstrak
+DATMdir=/gpfs/hps3/emc/{disk_project}/noscrub/{ENV[USER]}/DATM
+DOCNdir=/gpfs/hps3/emc/{disk_project}/noscrub/{ENV[USER]}/DOCN
## Scrubbed directory for large work files. Make sure you edit this.
CDSCRUB=/gpfs/hps3/ptmp/{ENV[USER]}
## Save directory. Make sure you edit this.
diff --git a/parm/system.conf.wcoss_dell_p3 b/parm/system.conf.wcoss_dell_p3
index 47573253f..1cb462243 100644
--- a/parm/system.conf.wcoss_dell_p3
+++ b/parm/system.conf.wcoss_dell_p3
@@ -22,6 +22,8 @@ inputroot=/gpfs/dell1/nco/ops/com/gfs/prod
[dir]
## Non-scrubbed directory for track files, etc. Make sure you edit this.
CDNOSCRUB=/gpfs/dell2/emc/{disk_project}/noscrub/{ENV[USER]}/hafstrak
+DATMdir=/gpfs/dell2/emc/{disk_project}/noscrub/{ENV[USER]}/DATM
+DOCNdir=/gpfs/dell2/emc/{disk_project}/noscrub/{ENV[USER]}/DOCN
## Scrubbed directory for large work files. Make sure you edit this.
CDSCRUB=/gpfs/dell2/ptmp/{ENV[USER]}
## Save directory. Make sure you edit this.
diff --git a/rocoto/cronjob_hafs_cdeps.sh b/rocoto/cronjob_hafs_cdeps.sh
new file mode 100755
index 000000000..980d372df
--- /dev/null
+++ b/rocoto/cronjob_hafs_cdeps.sh
@@ -0,0 +1,62 @@
+#!/bin/sh
+set -x
+date
+
+# NOAA WCOSS Dell Phase3
+#HOMEhafs=/gpfs/dell2/emc/modeling/noscrub/${USER}/save/HAFS
+#dev="-s sites/wcoss_dell_p3.ent -f"
+#PYTHON3=/usrx/local/prod/packages/python/3.6.3/bin/python3
+
+# NOAA WCOSS Cray
+#HOMEhafs=/gpfs/hps3/emc/hwrf/noscrub/${USER}/save/HAFS
+#dev="-s sites/wcoss_cray.ent -f"
+#PYTHON3=/opt/intel/intelpython3/bin/python3
+
+# NOAA RDHPCS Jet
+#HOMEhafs=/mnt/lfs4/HFIP/hwrfv3/${USER}/HAFS
+#dev="-s sites/xjet.ent -f"
+#PYTHON3=/apps/intel/intelpython3/bin/python3
+
+# MSU Orion
+ HOMEhafs=/work/noaa/hwrf/save/${USER}/HAFS
+ dev="-s sites/orion.ent -f"
+ PYTHON3=/apps/intel-2020/intel-2020/intelpython3/bin/python3
+
+#NOAA RDHPCS Hera
+#HOMEhafs=/scratch1/NCEPDEV/hwrf/save/${USER}/HAFS
+#dev="-s sites/hera.ent -f"
+#PYTHON3=/apps/intel/intelpython3/bin/python3
+
+cd ${HOMEhafs}/rocoto
+
+EXPT=$(basename ${HOMEhafs})
+
+#===============================================================================
+# Here are some simple examples, more examples can be seen in cronjob_hafs_rt.sh
+
+# Run data atmosphere with ERA5
+${PYTHON3} ./run_hafs.py -t ${dev} 2019082900 00L HISTORY config.EXPT=${EXPT} \
+ config.SUBEXPT=${EXPT}_era5 \
+ forecast.output_history=.true. \
+ ../parm/hafs_regional_static.conf ../parm/hafs_hycom.conf \
+ ../parm/hafs_datm.conf ../parm/hafs_datm_era5.conf
+
+# Run data ocean with OISST
+${PYTHON3} ./run_hafs.py -t ${dev} 2019082900 00L HISTORY config.EXPT=${EXPT} \
+ config.SUBEXPT=${EXPT}_oisst \
+ forecast.output_history=.true. \
+ ../parm/hafs_regional_static.conf \
+ ../parm/hafs_docn.conf ../parm/hafs_docn_oisst.conf
+
+# Run data ocean with GHRSST
+${PYTHON3} ./run_hafs.py -t ${dev} 2019082900 00L HISTORY config.EXPT=${EXPT} \
+ config.SUBEXPT=${EXPT}_ghrsst \
+ forecast.output_history=.true. \
+ ../parm/hafs_regional_static.conf \
+ ../parm/hafs_docn.conf ../parm/hafs_docn_ghrsst.conf
+
+#===============================================================================
+
+date
+
+echo 'cronjob done'
diff --git a/rocoto/hafs_workflow.xml.in b/rocoto/hafs_workflow.xml.in
index b274bc5a8..8c5062532 100644
--- a/rocoto/hafs_workflow.xml.in
+++ b/rocoto/hafs_workflow.xml.in
@@ -68,6 +68,8 @@
+
+
@@ -76,6 +78,8 @@
+
+
@@ -193,6 +197,7 @@
&FETCH_INPUT;YES
+ &RUN_DATM;NO
@@ -229,6 +234,7 @@
+ &RUN_DATM;NO
&FETCH_INPUT;YES
@@ -269,6 +275,7 @@
+ &RUN_DATM;NO
&FETCH_INPUT;YES
@@ -291,7 +298,23 @@
-@** if RUN_OCEAN==YES
+@** if RUN_DOCN==YES
+
+ &JOBhafs;/JHAFS_OCN_PREP
+ hafs_ocn_prep_&SID;_@Y@m@d@H
+ &WORKhafs;/hafs_ocn_prep.log
+ &ACCOUNT;
+ &RESERVATION;
+ &QUEUE_PE;
+ &PE_EXTRA;
+ &DOCN_PREP_RESOURCES;
+ &ENV_VARS;
+
+
+
+
+
+@** elseif RUN_OCEAN==YES
&JOBhafs;/JHAFS_OCN_PREP
hafs_ocn_prep_&SID;_@Y@m@d@H
@@ -493,32 +516,65 @@
@** endif
@** endif
+@** if RUN_ATM_INIT==YES
+
+ &JOBhafs;/JHAFS_ATM_INIT
+ hafs_atm_init_&SID;_@Y@m@d@H
+ &WORKhafs;/hafs_atm_init.log
+ &ACCOUNT;
+ &RESERVATION;
+ &QUEUE_PE;
+ &PE_EXTRA;
+ &ATM_INIT_RESOURCES;
+ &ENV_VARS;
+
+
+
+
+@** if gtype==regional
+
+@** endif
+
+
+
+@** endif
+
&JOBhafs;/JHAFS_FORECAST
hafs_forecast_&SID;_@Y@m@d@H
&WORKhafs;/hafs_forecast.log
&ACCOUNT;
&RESERVATION;
- &QUEUE_PE;
+ &QUEUE_FORECAST;
&PE_EXTRA;
&FORECAST_RESOURCES;
&ENV_VARS;
-
-@** if gtype==regional
-
-@** if RUN_OCEAN==YES
+@** if RUN_DATM==YES
+
+@** endif
+@** if RUN_DOCN==YES
-@** endif
+@** endif
+
+ &RUN_DATM;YES
+
+
+@** if gtype==regional
+
@** if RUN_GSI==YES
-
+
@** endif
@** if RUN_GSI_VR==YES
-
+
@** endif
-
+@** endif
+
+
+@** if RUN_OCEAN==YES
+
@** endif
@@ -537,11 +593,14 @@
-
-
-
-
+ already completed and we are not using a data atmosphere. -->
+
+ &RUN_DATM;NO
+
+
+
+
+
@@ -581,11 +640,14 @@
-
-
-
-
+ already completed and we are not using a data atmosphere. -->
+
+ &RUN_DATM;NO
+
+
+
+
+
diff --git a/rocoto/run_hafs.py b/rocoto/run_hafs.py
index 5bd1e7c32..be26a4ca3 100755
--- a/rocoto/run_hafs.py
+++ b/rocoto/run_hafs.py
@@ -29,7 +29,7 @@
# * -n --- disable renumbering of invests into non-invests
# * -W N --- discard invests weaker than N m/s before renumbering
#
-# Conf opitons:
+# Conf options:
# * ../parm/hafs_more.conf --- read this configuration file
# * config.run_gsi=yes --- specify the value of one configuration option
@@ -418,6 +418,9 @@ def fullify(s):
conf.timeless_sanity_check(enset,logger)
except Exception as e:
tcutil.rocoto.sanity_check_failed(logger,e)
+ logger.error("HAFS Sanity Checker Designation: INSANE!")
+ logger.error("Check your configuration for errors.")
+ logger.error("See earlier messages for clues.")
sys.exit(1)
logger.info("I think I'm sane.")
diff --git a/rocoto/sites/hera.ent b/rocoto/sites/hera.ent
index 180178e48..1f0067d1d 100644
--- a/rocoto/sites/hera.ent
+++ b/rocoto/sites/hera.ent
@@ -2,6 +2,7 @@
+
@@ -13,7 +14,10 @@
40">
1G">
+ 5G">
+ 1:ppn=12TOTAL_TASKS1NCTSK1OMP_THREADS100:25:00">
+ 1:ppn=12TOTAL_TASKS1NCTSK1OMP_THREADS100:25:00">
6:ppn=2:tpp=6TOTAL_TASKS12NCTSK2OMP_THREADS600:30:0024G">
1:ppn=40:tpp=1TOTAL_TASKS40NCTSK40OMP_THREADS100:30:00">
6:ppn=20:tpp=1TOTAL_TASKS120NCTSK20OMP_THREADS100:30:00">
@@ -60,6 +64,7 @@
43:ppn=12:tpp=2TOTAL_TASKS516NCTSK12&FORECAST_EXTRA;">
113:ppn=12:tpp=2TOTAL_TASKS1356NCTSK12&FORECAST_EXTRA;">
+ 115:ppn=12:tpp=2TOTAL_TASKS1380NCTSK12&FORECAST_EXTRA;">
133:ppn=12:tpp=2TOTAL_TASKS1596NCTSK12&FORECAST_EXTRA;">
173:ppn=12:tpp=2TOTAL_TASKS2076NCTSK12&FORECAST_EXTRA;">
@@ -90,9 +95,13 @@
25:ppn=20:tpp=2TOTAL_TASKS500NCTSK20&FORECAST_EXTRA;">
70:ppn=20:tpp=2TOTAL_TASKS1400NCTSK20&FORECAST_EXTRA;">
+ 71:ppn=20:tpp=2TOTAL_TASKS1420NCTSK20&FORECAST_EXTRA;">
82:ppn=20:tpp=2TOTAL_TASKS1640NCTSK20&FORECAST_EXTRA;">
106:ppn=20:tpp=2TOTAL_TASKS2120NCTSK20&FORECAST_EXTRA;">
+
+ 6:ppn=40:tpp=1TOTAL_TASKS240NCTSK40OMP_THREADS103:00:00">
+
52:ppn=12:tpp=2TOTAL_TASKS624NCTSK12&FORECAST_EXTRA;">
140:ppn=12:tpp=2TOTAL_TASKS1680NCTSK12&FORECAST_EXTRA;">
diff --git a/rocoto/sites/kjet.ent b/rocoto/sites/kjet.ent
index 29fbc8df4..803ddd55a 100644
--- a/rocoto/sites/kjet.ent
+++ b/rocoto/sites/kjet.ent
@@ -2,6 +2,7 @@
+
--partition=kjet">
--partition=kjet">
@@ -13,7 +14,10 @@
40">
1G">
+ 5G">
+ 1:ppn=12TOTAL_TASKS1NCTSK1OMP_THREADS100:25:00">
+ 1:ppn=12TOTAL_TASKS1NCTSK1OMP_THREADS100:25:00">
6:ppn=2:tpp=6TOTAL_TASKS12NCTSK2OMP_THREADS600:30:0024G">
1:ppn=40:tpp=1TOTAL_TASKS40NCTSK40OMP_THREADS100:30:00">
6:ppn=20:tpp=1TOTAL_TASKS120NCTSK20OMP_THREADS100:30:00">
@@ -60,6 +64,7 @@
43:ppn=12:tpp=2TOTAL_TASKS516NCTSK12&FORECAST_EXTRA;">
113:ppn=12:tpp=2TOTAL_TASKS1356NCTSK12&FORECAST_EXTRA;">
+ 115:ppn=12:tpp=2TOTAL_TASKS1380NCTSK12&FORECAST_EXTRA;">
133:ppn=12:tpp=2TOTAL_TASKS1596NCTSK12&FORECAST_EXTRA;">
173:ppn=12:tpp=2TOTAL_TASKS2076NCTSK12&FORECAST_EXTRA;">
@@ -80,6 +85,7 @@
45:ppn=12:tpp=2TOTAL_TASKS540NCTSK12&FORECAST_EXTRA;">
115:ppn=12:tpp=2TOTAL_TASKS1380NCTSK12&FORECAST_EXTRA;">
+ 116:ppn=12:tpp=2+1:ppn=8:tpp=2TOTAL_TASKS1400NCTSK12&FORECAST_EXTRA;">
135:ppn=12:tpp=2TOTAL_TASKS1620NCTSK12&FORECAST_EXTRA;">
175:ppn=12:tpp=2TOTAL_TASKS2100NCTSK12&FORECAST_EXTRA;">
diff --git a/rocoto/sites/orion.ent b/rocoto/sites/orion.ent
index 447a9e596..b6bd4b2b2 100644
--- a/rocoto/sites/orion.ent
+++ b/rocoto/sites/orion.ent
@@ -2,6 +2,7 @@
+
--partition=orion">
--partition=orion">
@@ -13,7 +14,10 @@
40">
1G">
+ 5G">
+ 1:ppn=1TOTAL_TASKS1NCTSK1OMP_THREADS100:25:0024G">
+ 1:ppn=1TOTAL_TASKS1NCTSK1OMP_THREADS100:25:0024G">
6:ppn=2:tpp=6TOTAL_TASKS12NCTSK2OMP_THREADS600:30:0024G">
1:ppn=40:tpp=1TOTAL_TASKS40NCTSK40OMP_THREADS100:30:00">
6:ppn=20:tpp=1TOTAL_TASKS120NCTSK20OMP_THREADS100:30:00">
@@ -60,6 +64,7 @@
43:ppn=12:tpp=2TOTAL_TASKS516NCTSK12&FORECAST_EXTRA;">
113:ppn=12:tpp=2TOTAL_TASKS1356NCTSK12&FORECAST_EXTRA;">
+ 115:ppn=12:tpp=2TOTAL_TASKS1380NCTSK12&FORECAST_EXTRA;">
133:ppn=12:tpp=2TOTAL_TASKS1596NCTSK12&FORECAST_EXTRA;">
173:ppn=12:tpp=2TOTAL_TASKS2076NCTSK12&FORECAST_EXTRA;">
@@ -85,6 +90,7 @@
22:ppn=20:tpp=2TOTAL_TASKS440NCTSK20&FORECAST_EXTRA;">
67:ppn=20:tpp=2TOTAL_TASKS1340NCTSK20&FORECAST_EXTRA;">
+ 68:ppn=20:tpp=2TOTAL_TASKS1360NCTSK20&FORECAST_EXTRA;">
79:ppn=20:tpp=2TOTAL_TASKS1580NCTSK20&FORECAST_EXTRA;">
103:ppn=20:tpp=2TOTAL_TASKS2060NCTSK20&FORECAST_EXTRA;">
@@ -93,6 +99,9 @@
82:ppn=20:tpp=2TOTAL_TASKS1640NCTSK20&FORECAST_EXTRA;">
106:ppn=20:tpp=2TOTAL_TASKS2120NCTSK20&FORECAST_EXTRA;">
+
+ 6:ppn=40:tpp=1TOTAL_TASKS240NCTSK40OMP_THREADS103:00:00">
+
52:ppn=12:tpp=2TOTAL_TASKS624NCTSK12&FORECAST_EXTRA;">
140:ppn=12:tpp=2TOTAL_TASKS1680NCTSK12&FORECAST_EXTRA;">
diff --git a/rocoto/sites/wcoss_cray.ent b/rocoto/sites/wcoss_cray.ent
index 8b7e6f057..13ab9e23b 100644
--- a/rocoto/sites/wcoss_cray.ent
+++ b/rocoto/sites/wcoss_cray.ent
@@ -2,6 +2,7 @@
+
@@ -13,7 +14,10 @@
24">
1G">
+ 1G">
+ 1:ppn=1TOTAL_TASKS1NCTSK1OMP_THREADS100:25:0024G">
+ 1:ppn=1TOTAL_TASKS1NCTSK1OMP_THREADS100:25:0024G">
3:ppn=4:tpp=6TOTAL_TASKS12NCTSK4OMP_THREADS600:30:00">
1:ppn=24:tpp=1TOTAL_TASKS24NCTSK24OMP_THREADS100:30:00">
30:ppn=12:tpp=1TOTAL_TASKS360NCTSK12OMP_THREADS100:30:00">
@@ -58,6 +62,7 @@
43:ppn=12:tpp=2TOTAL_TASKS516NCTSK12&FORECAST_EXTRA;">
113:ppn=12:tpp=2TOTAL_TASKS1356NCTSK12&FORECAST_EXTRA;">
+ 115:ppn=12:tpp=2TOTAL_TASKS1380NCTSK12&FORECAST_EXTRA;">
133:ppn=12:tpp=2TOTAL_TASKS1596NCTSK12&FORECAST_EXTRA;">
173:ppn=12:tpp=2TOTAL_TASKS2076NCTSK12&FORECAST_EXTRA;">
diff --git a/rocoto/sites/wcoss_dell_p3.ent b/rocoto/sites/wcoss_dell_p3.ent
index 17a3ba108..95586e5c4 100644
--- a/rocoto/sites/wcoss_dell_p3.ent
+++ b/rocoto/sites/wcoss_dell_p3.ent
@@ -2,6 +2,7 @@
+
@@ -13,8 +14,11 @@
24">
1G">
+ 5G">
+ 1:ppn=1TOTAL_TASKS1NCTSK1OMP_THREADS100:25:0024G">
+ 1:ppn=1TOTAL_TASKS1NCTSK1OMP_THREADS100:25:0024G">
1:ppn=6:tpp=1TOTAL_TASKS6NCTSK6OMP_THREADS4-R affinity[core\(4\):distribute=balance]00:30:00">
1:ppn=24:tpp=1TOTAL_TASKS24NCTSK24OMP_THREADS100:30:00">
8:ppn=24:tpp=1TOTAL_TASKS192NCTSK24OMP_THREADS101:30:00">
@@ -59,6 +63,7 @@
43:ppn=12:tpp=1TOTAL_TASKS516NCTSK12&FORECAST_EXTRA;">
113:ppn=12:tpp=1TOTAL_TASKS1356NCTSK12&FORECAST_EXTRA;">
+ 115:ppn=12:tpp=1TOTAL_TASKS1380NCTSK12&FORECAST_EXTRA;">
133:ppn=12:tpp=1TOTAL_TASKS1596NCTSK12&FORECAST_EXTRA;">
173:ppn=12:tpp=1TOTAL_TASKS2076NCTSK12&FORECAST_EXTRA;">
@@ -87,6 +92,11 @@
79:ppn=20:tpp=1TOTAL_TASKS1580NCTSK20&FORECAST_EXTRA;">
103:ppn=20:tpp=1TOTAL_TASKS2060NCTSK20&FORECAST_EXTRA;">
+
+ 10:ppn=24:tpp=1TOTAL_TASKS240NCTSK24OMP_THREADS103:00:00">
+ 114:ppn=12:tpp=1+1:ppn=8:tpp=1TOTAL_TASKS1376NCTSK12&FORECAST_EXTRA;">
+
+
52:ppn=12:tpp=1TOTAL_TASKS624NCTSK12&FORECAST_EXTRA;">
140:ppn=12:tpp=1TOTAL_TASKS1680NCTSK12&FORECAST_EXTRA;">
diff --git a/rocoto/sites/xjet.ent b/rocoto/sites/xjet.ent
index da6ebdc42..695c68ba8 100644
--- a/rocoto/sites/xjet.ent
+++ b/rocoto/sites/xjet.ent
@@ -2,6 +2,7 @@
+
--partition=xjet">
--partition=xjet">
@@ -13,7 +14,10 @@
24">
1G">
+ 5G">
+ 1:ppn=12TOTAL_TASKS1NCTSK1OMP_THREADS100:25:00">
+ 1:ppn=12TOTAL_TASKS1NCTSK1OMP_THREADS100:25:00">
3:ppn=4:tpp=6TOTAL_TASKS12NCTSK4OMP_THREADS600:30:00">
1:ppn=24:tpp=1TOTAL_TASKS24NCTSK24OMP_THREADS100:30:00">
8:ppn=24:tpp=1TOTAL_TASKS192NCTSK24OMP_THREADS101:30:00">
@@ -61,6 +65,7 @@
43:ppn=12:tpp=2TOTAL_TASKS516NCTSK12&FORECAST_EXTRA;">
113:ppn=12:tpp=2TOTAL_TASKS1356NCTSK12&FORECAST_EXTRA;">
+ 115:ppn=12:tpp=2TOTAL_TASKS1380NCTSK12&FORECAST_EXTRA;">
133:ppn=12:tpp=2TOTAL_TASKS1596NCTSK12&FORECAST_EXTRA;">
173:ppn=12:tpp=2TOTAL_TASKS2076NCTSK12&FORECAST_EXTRA;">
@@ -81,6 +86,7 @@
45:ppn=12:tpp=2TOTAL_TASKS540NCTSK12&FORECAST_EXTRA;">
115:ppn=12:tpp=2TOTAL_TASKS1380NCTSK12&FORECAST_EXTRA;">
+ 116:ppn=12:tpp=2+1:ppn=8:tpp=2TOTAL_TASKS1400NCTSK12&FORECAST_EXTRA;">
135:ppn=12:tpp=2TOTAL_TASKS1620NCTSK12&FORECAST_EXTRA;">
175:ppn=12:tpp=2TOTAL_TASKS2100NCTSK12&FORECAST_EXTRA;">
@@ -94,6 +100,9 @@
82:ppn=20:tpp=2TOTAL_TASKS1640NCTSK20&FORECAST_EXTRA;">
106:ppn=20:tpp=2TOTAL_TASKS2120NCTSK20&FORECAST_EXTRA;">
+
+ 10:ppn=24:tpp=1TOTAL_TASKS240NCTSK24OMP_THREADS103:00:00">
+
52:ppn=12:tpp=2TOTAL_TASKS624NCTSK12&FORECAST_EXTRA;">
140:ppn=12:tpp=2TOTAL_TASKS1680NCTSK12&FORECAST_EXTRA;">
diff --git a/rocoto/sites/xjet_hafsv0p2a.ent b/rocoto/sites/xjet_hafsv0p2a.ent
index b0a4544a1..8adfb8a51 100644
--- a/rocoto/sites/xjet_hafsv0p2a.ent
+++ b/rocoto/sites/xjet_hafsv0p2a.ent
@@ -2,6 +2,7 @@
+
--partition=xjet">
--partition=xjet">
@@ -13,7 +14,10 @@
24">
1G">
+ 5G">
+ 1:ppn=1TOTAL_TASKS1NCTSK1OMP_THREADS100:25:00">
+ 1:ppn=1TOTAL_TASKS1NCTSK1OMP_THREADS100:25:00">
3:ppn=4:tpp=6TOTAL_TASKS12NCTSK4OMP_THREADS600:30:00">
1:ppn=24:tpp=1TOTAL_TASKS24NCTSK24OMP_THREADS100:30:00">
30:ppn=24:tpp=1TOTAL_TASKS720NCTSK24OMP_THREADS101:30:00">
@@ -61,6 +65,7 @@
43:ppn=12:tpp=2TOTAL_TASKS516NCTSK12&FORECAST_EXTRA;">
113:ppn=12:tpp=2TOTAL_TASKS1356NCTSK12&FORECAST_EXTRA;">
+ 115:ppn=12:tpp=2TOTAL_TASKS1380NCTSK12&FORECAST_EXTRA;">
133:ppn=12:tpp=2TOTAL_TASKS1596NCTSK12&FORECAST_EXTRA;">
173:ppn=12:tpp=2TOTAL_TASKS2076NCTSK12&FORECAST_EXTRA;">
diff --git a/scripts/exhafs_datm_prep.sh b/scripts/exhafs_datm_prep.sh
new file mode 100755
index 000000000..20b6b1e38
--- /dev/null
+++ b/scripts/exhafs_datm_prep.sh
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+if [[ "$run_datm" != yes ]] ; then
+ echo "This job should only be run if \$run_datm is yes."
+ echo " \$run_datm=\"$run_datm\""
+ echo "Billions of electrons have whizzed by, the universe's entropy has increased, and yet nothing has been accomplished."
+ echo " -> SCRIPT IS EXITING BECAUSE THIS JOB SHOULD NOT BE RUN <- "
+ exit 0
+fi
+
+set -xe
+
+HOMEhafs=${HOMEhafs:-/gpfs/hps3/emc/hwrf/noscrub/${USER}/save/HAFS}
+WORKhafs=${WORKhafs:-/gpfs/hps3/ptmp/${USER}/${SUBEXPT}/${CDATE}/${STORMID}}
+USHhafs=${USHhafs:-${HOMEhafs}/ush}
+CDATE=${CDATE:-${YMDH}}
+APRUNS=${APRUNS:-"aprun -b -j1 -n1 -N1 -d1 -cc depth"}
+
+ifile=$datm_input_path/DATM_input_00000.nc
+ofile=ofile.nc
+mesh_atm="$mesh_atm"
+mesh_dir=$( dirname "$mesh_atm" )
+datm_source=${DATM_SOURCE:-ERA5}
+
+[ -d "$docn_input_path" ] || mkdir -p "$docn_input_path"
+[ -d "$mesh_dir" ] || mkdir "$mesh_dir"
+test -e "$ofile" -o -L "$ofile" && rm -f "$ofile"
+
+if [[ "$make_mesh_ocn" == yes ]] ; then
+ rm -f "$mesh_ocn"
+fi
+
+if [[ "$datm_source" == ERA5 ]] ; then
+ $APRUNS "$USHhafs/cdeps_utils/hafs_era5_prep.sh" "$datm_input_path"
+else
+ echo "ERROR: Unknown data atmosphere source $datm_source. Giving up." 2>&1
+ echo " -> SCRIPT IS FAILING BECAUSE OF INVALID \$DATM_SOURCE VALUE <- "
+ exit 1
+fi
+
+if [[ "$make_mesh_atm" != yes ]] ; then
+ set +x
+ echo "Processed atmosphere files are in $datm_input_path"
+ echo "Will use a premade mesh."
+ echo "Please enjoy your files and have a nice day."
+ set -x
+ exit 0
+fi
+
+set +x
+echo "Generating ESMF mesh from $datm_source files."
+echo "Running in dir \"$PWD\""
+set -x
+
+test -s "$ifile"
+test -r "$ifile"
+
+set +x
+echo "Grid generation $datm_source input file is \"$ifile\""
+echo "Temporary output mesh is $ofile"
+echo "Will deliver to \"$mesh_atm\""
+set -x
+
+# Generate the mesh.
+if [[ "$datm_source" == ERA5 ]] ; then
+ $APRUNS $USHhafs/cdeps_utils/hafs_esmf_mesh.py --ifile "$ifile" --ofile "$ofile" \
+ --overwrite --latvar latitude --lonvar longitude --double
+else
+ echo "ERROR: Unknown data atmosphere source $datm_source. Giving up." 2>&1
+ echo " -> SCRIPT IS FAILING BECAUSE OF INVALID \$DATM_SOURCE VALUE <- "
+ exit 1
+fi
+test -s "$ofile"
+
+# Copy mesh to final destination.
+$USHhafs/produtil_deliver.py -m "$ofile" "$mesh_atm"
+test -s "$mesh_atm"
+
+ls -l "$mesh_atm"
+
+# Rejoice.
+set +x
+echo "DATM $datm_source mesh was successfully generated."
+echo "Enjoy your mesh and have a nice day."
diff --git a/scripts/exhafs_docn_prep.sh b/scripts/exhafs_docn_prep.sh
new file mode 100755
index 000000000..7a532c359
--- /dev/null
+++ b/scripts/exhafs_docn_prep.sh
@@ -0,0 +1,85 @@
+#!/bin/bash
+
+if [[ "$run_docn" != yes ]] ; then
+ echo "This job should only be run if \$run_docn=yes"
+ echo " \$run_docn=\"$run_docn\""
+ echo "Beware! You may anger Poseidon by misusing this script. Avoid coastlines."
+ echo " -> SCRIPT IS EXITING BECAUSE THIS JOB SHOULD NOT BE RUN <- "
+ exit 0
+fi
+
+set -xe
+
+HOMEhafs=${HOMEhafs:-/gpfs/hps3/emc/hwrf/noscrub/${USER}/save/HAFS}
+WORKhafs=${WORKhafs:-/gpfs/hps3/ptmp/${USER}/${SUBEXPT}/${CDATE}/${STORMID}}
+USHhafs=${USHhafs:-${HOMEhafs}/ush}
+CDATE=${CDATE:-${YMDH}}
+APRUNS=${APRUNS:-"aprun -b -j1 -n1 -N1 -d1 -cc depth"}
+
+merged=merged.nc
+ofile=ofile.nc
+mesh_ocn="$mesh_ocn"
+mesh_dir=$( dirname "$mesh_ocn" )
+docn_source=${DOCN_SOURCE:-OISST}
+
+[ -d "$docn_input_path" ] || mkdir -p "$docn_input_path"
+[ -d "$mesh_dir" ] || mkdir "$mesh_dir"
+test -e "$ofile" -o -L "$ofile" && rm -f "$ofile"
+
+if [[ "$make_mesh_ocn" == yes ]] ; then
+ rm -f "$mesh_ocn"
+fi
+
+if [[ "$docn_source" == OISST ]] ; then
+ $APRUNS "$USHhafs/cdeps_utils/hafs_oisst_prep.sh" "$docn_input_path"
+elif [[ "${docn_source}" == RTOFS ]] ; then
+ $APRUNS "$USHhafs/cdeps_utils/hafs_rtofs_prep.sh" "$docn_input_path"
+elif [[ "${docn_source}" == GHRSST ]] ; then
+ $APRUNS "$USHhafs/cdeps_utils/hafs_ghrsst_prep.sh" "$docn_input_path"
+else
+ echo "ERROR: Unknown data ocean source $docn_source. Giving up." 2>&1
+ echo " -> SCRIPT IS FAILING BECAUSE OF INVALID \$DOCN_SOURCE VALUE <- "
+ exit 1
+fi
+
+if [[ "$make_mesh_ocn" != yes ]] ; then
+ set +x
+ echo "Delivered processed ocean files to $docn_input_path"
+ echo "Will use a premade mesh."
+ echo "Please enjoy your files and have a nice day."
+ set -x
+ exit 0
+fi
+
+set +x
+echo "Delivered processed ocean files to $docn_input_path"
+echo "Will now generate mesh in \"$ofile\""
+echo "Will deliver to \"$mesh_ocn\""
+set -x
+
+file0=$docn_input_path/DOCN_input_00000.nc
+
+# Generate the mesh from the merged file.
+if [[ "$docn_source" == OISST ]] ; then
+ $APRUNS $USHhafs/cdeps_utils/hafs_esmf_mesh.py --ifile "$file0" --ofile "$ofile" \
+ --maskvar sst --maskcal --double --overwrite
+elif [[ "${docn_source}" == RTOFS ]] ; then
+ $APRUNS $USHhafs/cdeps_utils/hafs_esmf_mesh.py --ifile "$file0" --ofile "$ofile" \
+ --overwrite --latvar Latitude --lonvar Longitude \
+ --maskvar sst --maskcal --double
+elif [[ "${docn_source}" == GHRSST ]] ; then
+ $APRUNS $USHhafs/cdeps_utils/hafs_esmf_mesh.py --ifile "$file0" --ofile "$ofile" \
+ --maskvar analysed_sst --maskcal --overwrite --double
+fi
+test -s "$ofile"
+
+# Copy mesh to final destination.
+$USHhafs/produtil_deliver.py -m "$ofile" "$mesh_ocn"
+test -s "$mesh_ocn"
+
+ls -l "$mesh_ocn"
+
+# Rejoice.
+set +x
+echo "DOCN mesh was successfully generated."
+echo "Enjoy your mesh and have a nice day."
diff --git a/scripts/exhafs_forecast.sh b/scripts/exhafs_forecast.sh
index 33b8ccbef..53d9acd17 100755
--- a/scripts/exhafs_forecast.sh
+++ b/scripts/exhafs_forecast.sh
@@ -218,6 +218,12 @@ ocean_start_dtg=${ocean_start_dtg:-43340.00000}
#end_hour=${NHRS:-126}
merge_import=${merge_import:-.false.}
+# CDEPS related settings
+run_datm=${run_datm:-no}
+run_docn=${run_docn:-no}
+mesh_atm=${mesh_atm:-''}
+mesh_ocn=${mesh_ocn:-''}
+
if [ $gtype = regional ]; then
if [ $quilting = .true. ]; then
ATM_tasks=$(($layoutx*$layouty+$write_groups*$write_tasks_per_group))
@@ -259,12 +265,22 @@ if [ ${run_ocean} = yes ] && [ $cpl_ocean -eq 2 ]; then
runSeq_ALL="OCN -> ATM :remapMethod=bilinear:unmappedaction=ignore:zeroregion=select:srcmaskvalues=0\n ATM -> OCN :remapMethod=bilinear:unmappedaction=ignore:zeroregion=select:srcmaskvalues=1:dstmaskvalues=0\n ATM\n OCN"
fi
# CMEPS based coupling through the bilinear regridding method
-if [ ${run_ocean} = yes ] && [ $cpl_ocean -eq 3 ]; then
+if [ ${run_ocean} = yes ] && [ $cpl_ocean -eq 3 ] && [ ${run_datm} = no ]; then
cplflx=.true.
OCN_petlist_bounds=$(printf "OCN_petlist_bounds: %04d %04d" $ATM_tasks $(($ATM_tasks+$ocean_tasks-1)))
MED_petlist_bounds=$(printf "MED_petlist_bounds: %04d %04d" $ATM_tasks $(($ATM_tasks+$ocean_tasks-1)))
runSeq_ALL="ATM -> MED :remapMethod=redist\n MED med_phases_post_atm\n OCN -> MED :remapMethod=redist\n MED med_phases_post_ocn\n MED med_phases_prep_atm\n MED med_phases_prep_ocn_accum\n MED med_phases_prep_ocn_avg\n MED -> ATM :remapMethod=redist\n MED -> OCN :remapMethod=redist\n ATM\n OCN"
fi
+# CDEPS data models
+if [ ${run_datm} = yes ]; then
+ cplflx=.true.
+ OCN_petlist_bounds=$(printf "OCN_petlist_bounds: %04d %04d" $ATM_tasks $(($ATM_tasks+$ocean_tasks-1)))
+ MED_petlist_bounds=$(printf "MED_petlist_bounds: %04d %04d" 0 $(($ATM_tasks-1)))
+elif [ ${run_docn} = yes ]; then
+ cplflx=.true.
+ OCN_petlist_bounds=$(printf "OCN_petlist_bounds: %04d %04d" $ATM_tasks $(($ATM_tasks+$ocean_tasks-1)))
+ MED_petlist_bounds=$(printf "MED_petlist_bounds: %04d %04d" $ATM_tasks $(($ATM_tasks+$ocean_tasks-1)))
+fi
# Prepare the output RESTART dir
if [ ${ENSDA} = YES ]; then
@@ -280,12 +296,16 @@ else
mkdir -p ${RESTARTout}
fi
+mkdir -p INPUT
+
+if [ ${run_datm} = no ]; then
+
# Link the input IC and/or LBC files into the INPUT dir
if [ ! -d $INPdir ]; then
echo "FATAL ERROR: Input data dir does not exist: $INPdir"
exit 9
fi
-mkdir -p INPUT
+
${NLN} ${INPdir}/*.nc INPUT/
# Copy fix files
@@ -534,6 +554,12 @@ sed -e "s/_blocksize_/${blocksize:-64}/g" \
-e "s/_merge_import_/${merge_import:-.false.}/g" \
input.nml.tmp > input.nml
+fi # if regional
+
+fi # if not cdeps datm
+
+if [ $gtype = regional ]; then
+
if [ ${run_ocean} = yes ]; then
# Copy hycom related files
${NCP} ${WORKhafs}/intercom/hycominit/hycom_settings hycom_settings
@@ -607,7 +633,100 @@ cat > temp << EOF
${yr}${mn}${dy}.${cyc}Z.${CASE}.32bit.non-hydro
$yr $mn $dy $cyc 0 0
EOF
+
+enddate=`${NDATE} +${NHRS} $CDATE`
+endyr=`echo $enddate | cut -c1-4`
+
+if [ ${run_datm} = no ]; then
cat temp diag_table.tmp > diag_table
+fi
+
+#---------------------------------------------------
+# Copy CDEPS input, parm, and fix files if required.
+#---------------------------------------------------
+
+if [ ${run_datm} = yes ]; then
+ datm_source=${DATM_SOURCE:-ERA5}
+ ${NCP} ${PARMforecast}/model_configure.tmp .
+ ${NLN} ${mesh_atm} INPUT/DATM_ESMF_mesh.nc
+ ${NLN} "$datm_input_path"/DATM_input*nc INPUT/
+
+ # Generate docn.streams from template specific to the model:
+ ${NCP} ${PARMhafs}/cdeps/datm_$( echo "$datm_source" | tr A-Z a-z ).streams datm.streams
+ for file in INPUT/DATM_input*nc ; do
+ if [[ -s "$file" ]] ; then
+ sed -i "/^stream_data_files01:/ s/$/\ \"INPUT\/$(basename $file)\"/" datm.streams
+ fi
+ done
+ sed -i "s/_yearFirst_/$yr/g" datm.streams
+ sed -i "s/_yearLast_/$endyr/g" datm.streams
+ sed -i "s/_mesh_atm_/INPUT\/DATM_ESMF_mesh.nc/g" datm.streams
+
+ # Generate datm_in and nems.configure from model-independent templates:
+ ${NCP} ${PARMhafs}/cdeps/datm_in .
+ sed -i "s/_mesh_atm_/INPUT\/DATM_ESMF_mesh.nc/g" datm_in
+
+ ${NCP} ${PARMforecast}/nems.configure.cdeps.tmp ./
+ sed -e "s/_ATM_petlist_bounds_/${ATM_petlist_bounds}/g" \
+ -e "s/_MED_petlist_bounds_/${MED_petlist_bounds}/g" \
+ -e "s/_OCN_petlist_bounds_/${OCN_petlist_bounds}/g" \
+ -e "s/_cpl_dt_/${cpl_dt}/g" \
+ -e "s/_base_dtg_/${CDATE}/g" \
+ -e "s/_ocean_start_dtg_/${ocean_start_dtg}/g" \
+ -e "s/_end_hour_/${NHRS}/g" \
+ -e "s/_merge_import_/${merge_import:-.true.}/g" \
+ -e "s/_mesh_atm_/INPUT\/DATM_ESMF_mesh.nc/g" \
+ -e "/_mesh_ocn_/d" \
+ -e "/_system_type_/d" \
+ -e "s/_atm_model_/datm/g" \
+ -e "s/_ocn_model_/hycom/g" \
+ nems.configure.cdeps.tmp > nems.configure
+
+elif [ ${run_docn} = yes ]; then
+ MAKE_MESH_OCN=$( echo "${make_mesh_ocn:-no}" | tr a-z A-Z )
+ ${NLN} "$docn_input_path"/DOCN_input*nc INPUT/
+
+ #${NCP} ${PARMhafs}/cdeps/docn_in .
+ #${NCP} ${PARMhafs}/cdeps/docn.streams .
+ docn_source=${DOCN_SOURCE:-OISST}
+
+ # Generate docn_in from template:
+ ${NCP} ${PARMhafs}/cdeps/docn_in docn_in_template
+ sed -e "s/_mesh_ocn_/INPUT\/DOCN_ESMF_mesh.nc/g" \
+ -e "s/_nx_global_/$docn_mesh_nx_global/g" \
+ -e "s/_ny_global_/$docn_mesh_ny_global/g" \
+ < docn_in_template > docn_in
+
+ # Generate docn.streams from template specific to the model:
+ ${NCP} ${PARMhafs}/cdeps/docn_$( echo "$docn_source" | tr A-Z a-z ).streams docn.streams
+ sed -i "s/_yearFirst_/$yr/g" docn.streams
+ sed -i "s/_yearLast_/$endyr/g" docn.streams
+ sed -i "s/_mesh_ocn_/INPUT\/DOCN_ESMF_mesh.nc/g" docn.streams
+ for file in INPUT/oisst*.nc INPUT/sst*.nc INPUT/DOCN_input*.nc ; do
+ if [[ -s "$file" ]] ; then
+ sed -i "/^stream_data_files01:/ s/$/\ \"INPUT\/$(basename $file)\"/" docn.streams
+ fi
+ done
+
+ ${NLN} "${mesh_ocn}" INPUT/DOCN_ESMF_mesh.nc
+
+ ${NCP} ${PARMforecast}/nems.configure.cdeps.tmp ./
+ sed -e "s/_ATM_petlist_bounds_/${ATM_petlist_bounds}/g" \
+ -e "s/_MED_petlist_bounds_/${MED_petlist_bounds}/g" \
+ -e "s/_OCN_petlist_bounds_/${OCN_petlist_bounds}/g" \
+ -e "s/_cpl_dt_/${cpl_dt}/g" \
+ -e "s/_base_dtg_/${CDATE}/g" \
+ -e "s/_ocean_start_dtg_/${ocean_start_dtg}/g" \
+ -e "s/_end_hour_/${NHRS}/g" \
+ -e "s/_merge_import_/${merge_import:-.true.}/g" \
+ -e "/_mesh_atm_/d" \
+ -e "s/_mesh_ocn_/INPUT\/DOCN_ESMF_mesh.nc/g" \
+ -e "s/_system_type_/ufs/g" \
+ -e "s/_atm_model_/fv3/g" \
+ -e "s/_ocn_model_/docn/g" \
+ nems.configure.cdeps.tmp > nems.configure
+
+fi
sed -e "s/YR/$yr/g" -e "s/MN/$mn/g" -e "s/DY/$dy/g" \
-e "s/H_R/$cyc/g" -e "s/NHRS/$NHRS/g" \
@@ -628,6 +747,7 @@ sed -e "s/YR/$yr/g" -e "s/MN/$mn/g" -e "s/DY/$dy/g" \
-e "s/_LAT2_/$output_grid_lat2/g" \
-e "s/_DLON_/$output_grid_dlon/g" \
-e "s/_DLAT_/$output_grid_dlat/g" \
+ -e "s/_print_esmf_/${print_esmf:-.false.}/g" \
model_configure.tmp > model_configure
# Copy fix files needed by inline_post
@@ -650,7 +770,7 @@ ${APRUNC} ./hafs_forecast.x 1>out.forecast 2>err.forecast
cat ./out.forecast
cat ./err.forecast
-if [ $gtype = regional ]; then
+if [ $gtype = regional ] && [ ${run_datm} = no ]; then
# Rename the restart files with a proper convention if needed
cd RESTART
@@ -676,6 +796,6 @@ if [ ! -s RESTART/oro_data.nc ]; then
${NCP} -pL INPUT/oro_data.nc RESTART/
fi
-fi # if [ $gtype = regional ]; then
+fi # if [ $gtype = regional ] && [ ${run_datm} = no ]; then
exit
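As a concrete illustration of the petlist logic added above: with the parm/hafs_datm.conf settings (layoutx=10, layouty=8, write_groups=1, write_tasks_per_group=40, ocean_tasks=120), ATM_tasks = 10*8 + 1*40 = 120. Assuming the usual placement of the ATM (here DATM) component on ranks 0 through ATM_tasks-1, the printf substitutions produce:

    ATM_petlist_bounds: 0000 0119
    OCN_petlist_bounds: 0120 0239
    MED_petlist_bounds: 0000 0119

i.e. a 240-task forecast job, which matches the new 240-task forecast resource entries added to the rocoto site files.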
diff --git a/sorc/build_forecast.sh b/sorc/build_forecast.sh
index 301cbd203..70b51939a 100755
--- a/sorc/build_forecast.sh
+++ b/sorc/build_forecast.sh
@@ -6,9 +6,15 @@ cwd=`pwd`
if [ $target = hera ]; then target=hera.intel ; fi
if [ $target = orion ]; then target=orion.intel ; fi
if [ $target = jet ]; then target=jet.intel ; fi
+if [ $target = cheyenne ]; then target=cheyenne.intel ; fi
+if [ $target = wcoss_cray ]; then
+ app=HAFS
+else
+ app=HAFS-ALL
+fi
cd hafs_forecast.fd/tests
-./compile.sh "$target" "-DAPP=HAFS -DCCPP_SUITES=FV3_HAFS_v0_gfdlmp_tedmf_nonsst,FV3_HAFS_v0_gfdlmp_tedmf,FV3_HAFS_v0_hwrf_thompson,FV3_HAFS_v0_hwrf -D32BIT=ON" 32bit YES NO
+./compile.sh "$target" "-DAPP=$app -DCCPP_SUITES=FV3_HAFS_v0_gfdlmp_tedmf_nonsst,FV3_HAFS_v0_gfdlmp_tedmf,FV3_HAFS_v0_hwrf_thompson,FV3_HAFS_v0_hwrf -D32BIT=ON" 32bit YES NO
exit
diff --git a/sorc/link_fix.sh b/sorc/link_fix.sh
index bbe75f642..ecc5b1a63 100755
--- a/sorc/link_fix.sh
+++ b/sorc/link_fix.sh
@@ -24,7 +24,7 @@ else
exit 1
fi
-for subdir in fix_am fix_orog fix_fv3_gmted2010 fix_sfc_climo fix_hycom hwrf-crtm-2.2.6;
+for subdir in fix_am fix_orog fix_fv3_gmted2010 fix_sfc_climo fix_hycom hwrf-crtm-2.2.6 fix_cdeps;
do
ln -sf ${FIXROOT}/${subdir} ./
done
diff --git a/sorc/machine-setup.sh b/sorc/machine-setup.sh
index 9fbb4ee73..786175330 100755
--- a/sorc/machine-setup.sh
+++ b/sorc/machine-setup.sh
@@ -90,12 +90,12 @@ elif [[ -L /usrx && "$( readlink /usrx 2> /dev/null )" =~ dell ]] ; then
module purge
source /usrx/local/prod/lmod/lmod/init/$__ms_shell
elif [[ -d /glade ]] ; then
- # We are on NCAR Yellowstone
+ # We are on NCAR Cheyenne
if ( ! eval module help > /dev/null 2>&1 ) ; then
echo load the module command 1>&2
. /usr/share/Modules/init/$__ms_shell
fi
- target=yellowstone
+ target=cheyenne
module purge
elif [[ -d /lustre && -d /ncrc ]] ; then
# We are on GAEA.
diff --git a/ush/cdeps_utils/hafs_era5_download.py b/ush/cdeps_utils/hafs_era5_download.py
new file mode 100755
index 000000000..f650d1478
--- /dev/null
+++ b/ush/cdeps_utils/hafs_era5_download.py
@@ -0,0 +1,240 @@
+#! /usr/bin/env python3
+
+# This next line will abort in any version earlier than Python 3.6:
+f'This script requires Python 3.6 or newer.'
+
+import time
+import subprocess
+import contextlib
+import os
+import tempfile
+import getopt
+import re
+import logging
+import datetime
+import sys
+
+try:
+ import cdsapi
+except ImportError as ie:
+ sys.stderr.write("""You are missing the cdsapi module!
+You must install it to run this script.
+
+ pip install cdsapi --user
+
+You will also need to register on the cdsapi website, sign the ERA5
+license agreement, get a key, and put the key in your ~/.cdsapirc file.
+""")
+
+import produtil.setup, produtil.fileop, produtil.locking
+
+# Constants
+UTILITY_NAME = 'hafs_era5_download'
+VERSION_STRING = '0.0.1'
+LOGGING_DOMAIN = UTILITY_NAME
+DATASET = 'reanalysis-era5-single-levels'
+PRODUCT_TYPE = 'reanalysis'
+VARIABLES = [
+ '10m_u_component_of_wind', '10m_v_component_of_wind', '2m_dewpoint_temperature',
+ '2m_temperature', 'convective_precipitation', 'convective_snowfall',
+ 'large_scale_precipitation', 'large_scale_snowfall', 'mean_sea_level_pressure',
+ 'near_ir_albedo_for_diffuse_radiation', 'near_ir_albedo_for_direct_radiation',
+ 'uv_visible_albedo_for_diffuse_radiation', 'uv_visible_albedo_for_direct_radiation',
+ 'surface_latent_heat_flux', 'surface_sensible_heat_flux',
+ 'surface_solar_radiation_downwards', 'surface_thermal_radiation_downwards',
+ 'surface_pressure', 'total_precipitation', 'skin_temperature',
+ 'eastward_turbulent_surface_stress', 'northward_turbulent_surface_stress',
+ 'surface_net_solar_radiation', 'surface_net_thermal_radiation'
+]
+FILE_FORMAT = 'netcdf'
+CYCLING_INTERVAL = datetime.timedelta(seconds=3600*24)
+EPSILON = datetime.timedelta(seconds=5) # epsilon for time comparison: five seconds
+
+# Non-constant globals:
+dayset=set() # list of YYYYMMDD strings
+happy=True # False = something failed
+filename_format = 'ERA5_%Y%m%d'
+swap_latitudes=True
+
+def usage(why=None):
+ print(f'''Synopsis: {UTILITY_NAME} [options] day [day [...]]
+
+Downloads the listed days of data. Days can be specified as:
+ 20210815 = specify one day: August 15, 2021
+ 20210815-20210819 = specify a range of days: August 15th to 19th, 2021
+ 2018 = specify an entire year (2018)
+
+Options:
+ -q | --quiet = log only warnings and errors
+ -v | --verbose = log all messages
+ -n | --no-invertlat = do not run "cdo invertlat" on downloaded files
+ -F format | --format format = filename format as in strftime(3)
+ -i | --invertlat = DO run "cdo inverlat". This is the default
+ --version = print {UTILITY_NAME} {VERSION_STRING}
+ --help = this message
+
+Format example: ERA5_%Y%m%d = ERA5_20210815
+Script will automatically append ".nc"
+''')
+ if why:
+ sys.stderr.write(f'SCRIPT IS ABORTING BECAUSE: {why}\n')
+ return 1
+ return 0
+
+# Function that makes the singleton for cdsapi client:
+_client = None
+def client():
+ global _client
+ if not _client:
+ logger.info('creating cdsapi client')
+ _client=cdsapi.Client()
+ return _client
+
+# Tell CDO to flip latitudes in a NetCDF file:
+def cdo_swap_latitudes(filename_in,filename_out):
+ logger.info('Flip latitudes in "'+str(filename_in)+'" and write to "'+str(filename_out)+'"')
+ cmd = [ 'cdo', 'invertlat', filename_in, filename_out ]
+ logger.info(f'''Run "{'" "'.join(cmd) }"''')
+ result = subprocess.run(cmd)
+ result.check_returncode()
+
+def quiet_remove(filename):
+ with contextlib.suppress(FileNotFoundError):
+ os.remove(filename)
+
+# The meat of the program: retrieve a file
+def request(when):
+ filename_base = when.strftime(filename_format)
+ filename_download = filename_base+'_download.nc'
+ filename_invert = filename_base+'_invert.nc'
+ filename_lock = filename_base+'.lock'
+ filename_final = filename_base+'.nc'
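+ # Download into a temporary name, optionally invert latitudes into a
+ # second temporary file, then deliver atomically to the final name.
+ # The lock file keeps concurrent invocations from fetching the same day.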
+ if os.path.exists(filename_final):
+ logger.info(filename_final+': already exists. Skipping.')
+ return
+ with produtil.locking.LockFile(filename_lock,logger):
+ try:
+ if os.path.exists(filename_final):
+ logger.info(filename_final+': already exists (after lock). Skipping.')
+ return
+ quiet_remove(filename_download)
+ quiet_remove(filename_invert)
+ logger.info(filename_download+': retrieve '+str(when)+'...')
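+ # One CDS request per day: all 24 hourly analyses of the listed
+ # variables, returned as a single NetCDF file.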
+ request = {
+ 'product_type': PRODUCT_TYPE,
+ 'variable': VARIABLES,
+ 'year': '%04d'%int(when.year),
+ 'month': [ '%02d'%int(when.month) ],
+ 'day': [ '%02d'%int(when.day) ],
+ 'time': [ '%02d'%hour for hour in range(24) ],
+ 'format': FILE_FORMAT,
+ }
+ # super-wordy debugging: logger.debug(filename_download+': request is '+str(request))
+ client().retrieve(DATASET,request,filename_download)
+ filename_copy=filename_download
+ if swap_latitudes:
+ cdo_swap_latitudes(filename_download,filename_invert)
+ filename_copy=filename_invert
+ produtil.fileop.deliver_file(filename_copy,filename_final,logger=logger,
+ keep=False,verify=False,moveok=True,force=True)
+ quiet_remove(filename_download)
+ quiet_remove(filename_invert)
+ quiet_remove(filename_lock)
+ except Exception as e:
+ quiet_remove(filename_download)
+ quiet_remove(filename_invert)
+ raise e
+
+# Parse arguments and initialize logging:
+log_level = logging.INFO
+optlist,args = getopt.getopt(sys.argv[1:],'qveniF:',[
+ 'version','help','verbose','quiet','invertlat','no-invertlat','format='])
+if len(args)<1:
+ exit(usage("No arguments provided!"))
+for optarg in optlist:
+ if optarg[0] in ['-q', '--quiet']:
+ log_level = logging.WARNING
+ elif optarg[0] in ['-v', '--verbose']:
+ log_level = logging.DEBUG
+ elif optarg[0] in ['-i', '--invertlat']:
+ swap_latitudes = True
+ elif optarg[0] in ['-n', '--no-invertlat']:
+ swap_latitudes = False
+ elif optarg[0] in ['-F', '--format']:
+ filename_format = optarg[1]
+ elif optarg[0]=='--help':
+ exit(usage())
+ elif optarg[0]=='--version':
+ print(UTILITY_NAME+' '+VERSION_STRING)
+ exit(0)
+logger = logging.getLogger(LOGGING_DOMAIN)
+
+produtil.setup.setup(level=log_level,send_dbn=False)
+
+# Parse the days. This loop was modified from run_hafs.py:
+for arg in args:
+ if re.match(r'\A\d{8}\Z',arg):
+ logger.info('single date/time')
+ # Single date/time
+ dayset.add(arg)
+ elif re.match(r'\A\d{4}\Z',arg):
+ logger.info('year')
+ # Year
+ start=datetime.datetime(int(arg,10),1,1,0,0,0)
+ end=datetime.datetime(int(arg,10),12,31,23,59,0)
+ now=start
+ while now=len(daylist):
+ logger.info(f'{day}: sleep for a little while... 30 second snooze...')
+ time.sleep(30)
+ logger.info(f'{day}: done sleeping.')
+ iloop=0
+ except Exception as ex: # Unfortunately, cdsapi raises Exception
+ happy = False
+ logger.error(f'CDSAPI failed to download day {day}: {ex}',exc_info=ex)
+
+# Exit 0 on success, 1 on failure:
+exit( 0 if happy else 1 )
diff --git a/ush/cdeps_utils/hafs_era5_prep.sh b/ush/cdeps_utils/hafs_era5_prep.sh
new file mode 100755
index 000000000..d072ac3fa
--- /dev/null
+++ b/ush/cdeps_utils/hafs_era5_prep.sh
@@ -0,0 +1,88 @@
+#!/bin/bash
+
+set -xe
+
+set -u
+output_path="$1"
+set +u
+
+if ( ! which cdo ) ; then
+ set +x
+ echo "The \"cdo\" command isn't in your path! Go find it and rerun this job." 1>&2
+ set -x
+ exit 1
+fi
+
+if ( ! which ncwa ) ; then
+ set +x
+ echo "The \"ncwa\" command from the NetCDF Data Operators (nco) is not in your path! Go find the nco and rerun this job." 1>&2
+ set -x
+ exit 1
+fi
+
+HOMEhafs=${HOMEhafs:-/gpfs/hps3/emc/hwrf/noscrub/${USER}/save/HAFS}
+WORKhafs=${WORKhafs:-/gpfs/hps3/ptmp/${USER}/${SUBEXPT}/${CDATE}/${STORMID}}
+USHhafs=${USHhafs:-${HOMEhafs}/ush}
+CDATE=${CDATE:-${YMDH}}
+
+set -u
+
+# Start & end times are at day precision, not hour
+m1date=$( date -d "${CDATE:0:4}-${CDATE:4:2}-${CDATE:6:2}t${CDATE:8:2}:00:00+00 -24 hours" +%Y%m%d )
+p1date=$( date -d "${CDATE:0:4}-${CDATE:4:2}-${CDATE:6:2}t${CDATE:8:2}:00:00+00 +$(( NHRS+24 )) hours" +%Y%m%d )
+now=$m1date
+end=$p1date
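+# The window is padded by one day on either side of the forecast period
+# (CDATE-24h through CDATE+NHRS+24h) so the DATM stream always has data
+# bracketing the forecast hours.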
+
+set +x
+echo "Linking ERA5 files."
+echo "Running in dir \"$PWD\""
+echo "Will link ERA5 files into $output_path"
+echo "ERA5 Date range is $now to $end"
+set -x
+
+rm -f DATM_input* merged.nc
+
+# Generate the filenames.
+usefiles=''
+missing=''
+itime=0
+infinity=9999 # infinite loop guard
+while (( now <= end && itime < infinity )) ; do
+ infile="$DATMdir/ERA5_${now:0:8}.nc"
+ if [[ ! -s "$infile" || ! -r "$infile" ]] ; then
+ echo "ERA5 input file is missing: $infile" 2>&1
+ missing="$missing $infile"
+ else
+ usefiles="$usefiles $infile"
+ outfile=$( printf "%s/DATM_input_%05d.nc" "$output_path" $itime )
+ ln -sf "$infile" "$outfile"
+ fi
+ now=$( date -d "${now:0:4}-${now:4:2}-${now:6:2}t00:00:00+00 +24 hours" +%Y%m%d )
+ itime=$(( itime+1 ))
+done
+if (( itime >= infinity )) ; then
+ echo "Infinite loop detected! The \"date\" command did not behave as expected. Aborting!" 1>&2
+ exit 1
+fi
+
+if [[ "${missing:-}Q" != Q ]] ; then
+ set +x
+ echo "You are missing some ERA5 input files!"
+ for infile in $missing ; do
+ echo " missing: $infile"
+ done
+ echo " -> SCRIPT IS ABORTING BECAUSE INPUT FILES ARE MISSING <- "
+ exit 1
+fi
+
+set +x
+echo "ERA5 input files are:"
+for f in $usefiles ; do
+ echo " - $f"
+done
+set -x
+
+# Rejoice.
+set +x
+echo "Successfully linked ERA5 files at $output_path"
+echo "Please enjoy your files and have a nice day."
diff --git a/ush/cdeps_utils/hafs_esmf_mesh.py b/ush/cdeps_utils/hafs_esmf_mesh.py
new file mode 100755
index 000000000..8cabf9f98
--- /dev/null
+++ b/ush/cdeps_utils/hafs_esmf_mesh.py
@@ -0,0 +1,430 @@
+#!/usr/bin/env python3
+
+import os, sys, getopt
+import argparse
+try:
+ import numpy as np
+ import xarray as xr
+ import dask.array as da
+ import dask.dataframe as dd
+ from dask.diagnostics import ProgressBar
+ from datetime import datetime
+ import pandas as pd
+except ImportError as ie:
+ sys.stderr.write("""You are missing some modules!
+The following commands can be used to install required Python modules to run this script
+
+ pip install xarray --user
+ pip install dask --user
+ pip install "dask[array]" --upgrade --user
+ pip install "dask[dataframe]" --upgrade --user
+""")
+ sys.stderr.write(str(ie))
+ exit(2)
+
+
+def calculate_corners(center_lat, center_lon):
+ """Calculate corner coordinates by averaging neighbor cells
+ """
+
+ # get rank
+ rank = len(center_lat.dims)
+
+ if rank == 1:
+ # get dimensions
+ nlon = center_lon.size
+ nlat = center_lat.size
+
+ # convert center points from 1d to 2d
+ center_lat2d = da.broadcast_to(center_lat.values[None,:], (nlon, nlat))
+ center_lon2d = da.broadcast_to(center_lon.values[:,None], (nlon, nlat))
+ elif rank == 2:
+ # get dimensions
+ dims = center_lon.shape
+ nlon = dims[0]
+ nlat = dims[1]
+
+ # just rename and convert to dask array
+ center_lat2d = da.from_array(center_lat)
+ center_lon2d = da.from_array(center_lon)
+ else:
+ print('Unrecognized grid! The rank of coordinate variables can be 1 or 2 but it is {}.'.format(rank))
+ sys.exit(2)
+
+ # calculate corner coordinates for latitude, counterclockwise order, imposing Fortran ordering
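+ # Pad one ghost row/column using odd reflection so cells on the grid
+ # boundary also have four neighbors to average when forming corners.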
+ center_lat2d_ext = da.from_array(np.pad(center_lat2d.compute(), (1,1), mode='reflect', reflect_type='odd'))
+
+ ur = (center_lat2d_ext[1:-1,1:-1]+
+ center_lat2d_ext[0:-2,1:-1]+
+ center_lat2d_ext[1:-1,2:]+
+ center_lat2d_ext[0:-2,2:])/4.0
+ ul = (center_lat2d_ext[1:-1,1:-1]+
+ center_lat2d_ext[0:-2,1:-1]+
+ center_lat2d_ext[1:-1,0:-2]+
+ center_lat2d_ext[0:-2,0:-2])/4.0
+ ll = (center_lat2d_ext[1:-1,1:-1]+
+ center_lat2d_ext[1:-1,0:-2]+
+ center_lat2d_ext[2:,1:-1]+
+ center_lat2d_ext[2:,0:-2])/4.0
+ lr = (center_lat2d_ext[1:-1,1:-1]+
+ center_lat2d_ext[1:-1,2:]+
+ center_lat2d_ext[2:,1:-1]+
+ center_lat2d_ext[2:,2:])/4.0
+
+ # this looks like clockwise ordering, but it is transposed and becomes counterclockwise, bit-for-bit with NCL
+ corner_lat = da.stack([ul.T.reshape((-1,)).T, ll.T.reshape((-1,)).T, lr.T.reshape((-1,)).T, ur.T.reshape((-1,)).T], axis=1)
+
+ # calculate corner coordinates for longitude, counterclockwise order, imposing Fortran ordering
+ center_lon2d_ext = da.from_array(np.pad(center_lon2d.compute(), (1,1), mode='reflect', reflect_type='odd'))
+
+ ur = (center_lon2d_ext[1:-1,1:-1]+
+ center_lon2d_ext[0:-2,1:-1]+
+ center_lon2d_ext[1:-1,2:]+
+ center_lon2d_ext[0:-2,2:])/4.0
+ ul = (center_lon2d_ext[1:-1,1:-1]+
+ center_lon2d_ext[0:-2,1:-1]+
+ center_lon2d_ext[1:-1,0:-2]+
+ center_lon2d_ext[0:-2,0:-2])/4.0
+ ll = (center_lon2d_ext[1:-1,1:-1]+
+ center_lon2d_ext[1:-1,0:-2]+
+ center_lon2d_ext[2:,1:-1]+
+ center_lon2d_ext[2:,0:-2])/4.0
+ lr = (center_lon2d_ext[1:-1,1:-1]+
+ center_lon2d_ext[1:-1,2:]+
+ center_lon2d_ext[2:,1:-1]+
+ center_lon2d_ext[2:,2:])/4.0
+
+ # this looks like clockwise ordering, but it is transposed and becomes counterclockwise, bit-for-bit with NCL
+ corner_lon = da.stack([ul.T.reshape((-1,)).T, ll.T.reshape((-1,)).T, lr.T.reshape((-1,)).T, ur.T.reshape((-1,)).T], axis=1)
+
+ return center_lat2d, center_lon2d, corner_lat, corner_lon
+
+def write_to_esmf_mesh(filename, center_lat, center_lon, corner_lat, corner_lon, mask, area=None):
+ """
+ Writes ESMF Mesh to file
+ dask array doesn't support order='F' for Fortran-contiguous (column-major) order;
+ the workaround is to use arr.T.reshape(...).T
+ """
+ # create array with unique coordinate pairs
+ # remove coordinates that are shared between the elements
+ corner_pair = da.stack([corner_lon.T.reshape((-1,)).T, corner_lat.T.reshape((-1,)).T], axis=1)
+
+ # REPLACED: corner_pair_uniq = dd.from_dask_array(corner_pair).drop_duplicates().to_dask_array(lengths=True)
+ # the following reduces memory use by about 17%
+ corner_pair_uniq = dd.from_dask_array(corner_pair).drop_duplicates().values
+ corner_pair_uniq.compute_chunk_sizes()
+
+ # check size of unique coordinate pairs
+ dims = mask.shape
+ nlon = dims[0]
+ nlat = dims[1]
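+ # A structured nlon x nlat grid of quadrilaterals has (nlon+1)*(nlat+1)
+ # unique corner nodes, i.e. nlon*nlat + nlon + nlat + 1.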
+ elem_conn_size = nlon*nlat+nlon+nlat+1
+ if corner_pair_uniq.shape[0] != elem_conn_size:
+ print('The size of unique coordinate pairs is {} but expected size is {}!'.format(corner_pair_uniq.shape[0], elem_conn_size))
+ print('Please check the input file or try to force double precision with --double option. Exiting ...')
+ sys.exit(2)
+
+ # create element connections
+ corners = dd.concat([dd.from_dask_array(c) for c in [corner_lon.T.reshape((-1,)).T, corner_lat.T.reshape((-1,)).T]], axis=1)
+ corners.columns = ['lon', 'lat']
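+ # groupby(...).ngroup()+1 assigns a 1-based index to each unique
+ # (lon,lat) node, yielding the element-to-node connectivity.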
+ elem_conn = corners.compute().groupby(['lon','lat'], sort=False).ngroup()+1
+ elem_conn = da.from_array(elem_conn.to_numpy())
+
+ # create new dataset for output
+ out = xr.Dataset()
+
+ out['origGridDims'] = xr.DataArray(np.array(center_lon.shape, dtype=np.int32),
+ dims=('origGridRank'))
+
+ out['nodeCoords'] = xr.DataArray(corner_pair_uniq,
+ dims=('nodeCount', 'coordDim'),
+ attrs={'units': 'degrees'})
+
+ out['elementConn'] = xr.DataArray(elem_conn.T.reshape((4,-1)).T,
+ dims=('elementCount', 'maxNodePElement'),
+ attrs={'long_name': 'Node indices that define the element connectivity'})
+ out.elementConn.encoding = {'dtype': np.int32}
+
+ out['numElementConn'] = xr.DataArray(4*np.ones(center_lon.size, dtype=np.int32),
+ dims=('elementCount'),
+ attrs={'long_name': 'Number of nodes per element'})
+
+ out['centerCoords'] = xr.DataArray(da.stack([center_lon.T.reshape((-1,)).T,
+ center_lat.T.reshape((-1,)).T], axis=1),
+ dims=('elementCount', 'coordDim'),
+ attrs={'units': 'degrees'})
+
+ # add area if it is available
+ if area is not None:
+ out['elementArea'] = xr.DataArray(area.T.reshape((-1,)).T,
+ dims=('elementCount'),
+ attrs={'units': 'radians^2',
+ 'long_name': 'area weights'})
+
+ # add mask
+ out['elementMask'] = xr.DataArray(mask.T.reshape((-1,)).T,
+ dims=('elementCount'),
+ attrs={'units': 'unitless'})
+ out.elementMask.encoding = {'dtype': np.int32}
+
+ # force no '_FillValue' if not specified
+ for v in out.variables:
+ if '_FillValue' not in out[v].encoding:
+ out[v].encoding['_FillValue'] = None
+
+ # add global attributes
+ out.attrs = {'title': 'ESMF unstructured grid file for rectangular grid with {} dimension'.format('x'.join(list(map(str,center_lat.shape)))),
+ 'created_by': os.path.basename(__file__),
+ 'date_created': '{}'.format(datetime.now()),
+ 'conventions': 'ESMFMESH',
+ }
+
+ # write output file
+ if filename is not None:
+ print('Writing {} ...'.format(filename))
+ out.to_netcdf(filename)
+
+def write_to_scrip(filename, center_lat, center_lon, corner_lat, corner_lon, mask, area=None):
+ """
+ Writes SCRIP grid definition to file
+ dask array doesn't support order='F' for Fortran-contiguous (column-major) order;
+ the workaround is to use arr.T.reshape(...).T
+ """
+ # create new dataset for output
+ out = xr.Dataset()
+
+ out['grid_dims'] = xr.DataArray(np.array(center_lat.shape, dtype=np.int32),
+ dims=('grid_rank',))
+ out.grid_dims.encoding = {'dtype': np.int32}
+
+ out['grid_center_lat'] = xr.DataArray(center_lat.T.reshape((-1,)).T,
+ dims=('grid_size'),
+ attrs={'units': 'degrees'})
+
+ out['grid_center_lon'] = xr.DataArray(center_lon.T.reshape((-1,)).T,
+ dims=('grid_size'),
+ attrs={'units': 'degrees'})
+
+ out['grid_corner_lat'] = xr.DataArray(corner_lat.T.reshape((4, -1)).T,
+ dims=('grid_size','grid_corners'),
+ attrs={'units': 'degrees'})
+
+ out['grid_corner_lon'] = xr.DataArray(corner_lon.T.reshape((4, -1)).T,
+ dims=('grid_size','grid_corners'),
+ attrs={'units': 'degrees'})
+
+ # include area if it is available
+ if area is not None:
+ out['grid_area'] = xr.DataArray(area.T.reshape((-1,)).T,
+ dims=('grid_size'),
+ attrs={'units': 'radians^2',
+ 'long_name': 'area weights'})
+
+ out['grid_imask'] = xr.DataArray(mask.T.reshape((-1,)).T,
+ dims=('grid_size'),
+ attrs={'units': 'unitless'})
+ out.grid_imask.encoding = {'dtype': np.int32}
+
+ # force no '_FillValue' if not specified
+ for v in out.variables:
+ if '_FillValue' not in out[v].encoding:
+ out[v].encoding['_FillValue'] = None
+
+ # add global attributes
+ out.attrs = {'title': 'Rectangular grid with {} dimension'.format('x'.join(list(map(str,center_lat.shape)))),
+ 'created_by': os.path.basename(__file__),
+ 'date_created': '{}'.format(datetime.now()),
+ 'conventions': 'SCRIP',
+ }
+
+ # write output file
+ if filename is not None:
+ print('Writing {} ...'.format(filename))
+ out.to_netcdf(filename)
+
+
+def file_type(x):
+ if x.lower() == 'scrip' or x.lower() == 'esmf':
+ return x
+ else:
+ raise argparse.ArgumentTypeError('SCRIP or ESMF value expected for output type.')
+
+#@profile
+def main(argv):
+ """
+ Main driver to calculate and write SCRIP- and ESMF-formatted grid representations
+ """
+ # set defaults for command line arguments
+ ifile = ''
+ ofile = ''
+ oformat = 'ESMF'
+ overwrite = False
+ flip = False
+ latrev = False
+ latvar = 'lat'
+ lonvar = 'lon'
+ maskvar = 'mask'
+ maskcal = False
+ addarea = False
+ double = False
+
+ # read command line arguments
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--ifile' , help='Input grid file name', required=True)
+ parser.add_argument('--ofile' , help='Output file name', required=True)
+ parser.add_argument('--oformat' , help='Output data format [SCRIP, ESMF], defaults to ESMF', required=False, type=file_type, nargs='?', const='ESMF')
+ parser.add_argument('--overwrite', help='Overwrites output file, defaults to not', required=False, action='store_true')
+ parser.add_argument('--flip' , help='Flip mask values. SCRIP requires 0/land and 1/ocean', required=False, action='store_true')
+ parser.add_argument('--latrev' , help='Reverse latitude axis', required=False, action='store_true')
+ parser.add_argument('--latvar' , help='Name of latitude variable, defaults to ''lat''', required=False, nargs='?', const='lat')
+ parser.add_argument('--lonvar' , help='Name of longitude variable, defaults to ''lon''', nargs='?', const='lon')
+ parser.add_argument('--maskvar' , help='Name of mask variable, defaults to ''mask''', nargs='?', const='mask')
+ parser.add_argument('--maskcal' , help='Calculate mask using fill value from variable defined in maskvar - 0/land and 1/ocean', required=False, action='store_true')
+ parser.add_argument('--addarea' , help='Add area field to output file, defaults to not', required=False, action='store_true')
+ parser.add_argument('--double' , help='Double precision output, defaults to float', required=False, action='store_true')
+ args = parser.parse_args()
+
+ if args.ifile:
+ ifile = args.ifile
+ if args.ofile:
+ ofile = args.ofile
+ if args.oformat:
+ oformat = args.oformat
+ if args.overwrite:
+ overwrite = args.overwrite
+ if args.flip:
+ flip = args.flip
+ if args.latrev:
+ latrev = args.latrev
+ if args.latvar:
+ latvar = args.latvar
+ if args.lonvar:
+ lonvar = args.lonvar
+ if args.maskvar:
+ maskvar = args.maskvar
+ if args.maskcal:
+ maskcal = args.maskcal
+ if not args.maskvar:
+ print('maskcal argument requires maskvar to calculate mask! exiting ...')
+ sys.exit()
+ if args.addarea:
+ addarea = args.addarea
+ if args.double:
+ double = args.double
+
+ # print out configuration
+ print("Configuration:")
+ print("ifile = {}".format(ifile))
+ print("ofile = {}".format(ofile))
+ print("oformat = {}".format(oformat))
+ print("overwrite = {}".format(overwrite))
+ print("flip = {}".format(flip))
+ print("latrev = {}".format(latrev))
+ print("latvar = {}".format(latvar))
+ print("lonvar = {}".format(lonvar))
+ print("maskvar = {}".format(maskvar))
+ print("maskcal = {} ({})".format(maskcal, maskvar))
+ print("addarea = {}".format(addarea))
+ print("double = {}".format(double))
+
+ # open file, transpose() fixes dimension ordering and mimic Fortran
+ if os.path.isfile(ifile):
+ ds = xr.open_dataset(ifile, mask_and_scale=False, decode_times=False).transpose()
+ else:
+ print('Input file could not be found!')
+ sys.exit(2)
+
+ # check output file
+ if overwrite:
+ if os.path.isfile(ofile):
+ print('Removing existing output file {}.'.format(ofile))
+ os.remove(ofile)
+ else:
+ if os.path.isfile(ofile):
+ print('Output file exists. Please provide --overwrite flag.')
+ sys.exit(2)
+
+ # check coordinate variables
+ if latvar not in ds.coords and latvar not in ds.data_vars:
+ print('Input file does not have a variable named {}.'.format(latvar))
+ print('File has the following coordinates: {}'.format(ds.coords))
+ print('File has the following data variables: {}'.format(ds.data_vars))
+ sys.exit(2)
+
+ if lonvar not in ds.coords and lonvar not in ds.data_vars:
+ print('Input file does not have a variable named {}.'.format(lonvar))
+ print('File has the following coordinates: {}'.format(ds.coords))
+ print('File has the following data variables: {}'.format(ds.data_vars))
+ sys.exit(2)
+
+ # remove time dimension from coordinate variables
+ hasTime = 'time' in ds[latvar].dims
+ if hasTime:
+ lat = ds[latvar][:,:,0]
+ else:
+ lat = ds[latvar]
+
+ hasTime = 'time' in ds[lonvar].dims
+ if hasTime:
+ lon = ds[lonvar][:,:,0]
+ else:
+ lon = ds[lonvar]
+
+ # reverse latitude dimension
+ if latrev:
+ lat_name = [x for x in lat.coords.dims if 'lat' in x]
+ if lat_name:
+ lat = lat.reindex({lat_name[0]: list(reversed(lat[lat_name[0]]))})
+
+ # remove time dimension from mask variable and optionally flip mask values
+ # this will also create an artificial mask variable of all ones, if required
+ if maskvar in ds.data_vars:
+ print('Using mask values from the file.')
+
+ # check mask has time dimension or not
+ hasTime = 'time' in ds[maskvar].dims
+ if hasTime:
+ mask = ds[maskvar][:,:,0]
+ else:
+ mask = ds[maskvar][:]
+
+ # use variable to construct mask information
+ if maskcal:
+ fill_value = None
+ if '_FillValue' in mask.attrs:
+ fill_value = mask._FillValue
+ elif 'missing_value' in mask.attrs:
+ fill_value = mask.missing_value
+
+ if fill_value:
+ mask = da.from_array(xr.where(mask == fill_value, 0, 1).astype(dtype=np.int8))
+ else:
+ print('Using an artificially generated mask with ones everywhere.')
+ if len(lat.dims) == 1:
+ mask = da.from_array(np.ones((next(iter(lon.sizes.values())), next(iter(lat.sizes.values()))), dtype=np.int8))
+ else:
+ mask = da.from_array(np.ones(tuple(lat.sizes.values()), dtype=np.int8))
+
+ # flip mask values
+ if flip:
+ print('Flipping mask values to 0 for land and 1 for ocean')
+ mask = xr.where(mask > 0, 0, 1)
+
+ # calculate corner coordinates, center coordinates are converted to 2d if it is 1d
+ if double:
+ center_lat, center_lon, corner_lat, corner_lon = calculate_corners(lat.astype(np.float64, copy=False), lon.astype(np.float64, copy=False))
+ else:
+ center_lat, center_lon, corner_lat, corner_lon = calculate_corners(lat, lon)
+
+ # TODO: add support to calculate area
+ if addarea:
+ print('The area calculation is not supported! --addarea is reserved for future use.')
+
+ # create output file
+ if oformat.lower() == 'scrip':
+ write_to_scrip(ofile, center_lat, center_lon, corner_lat, corner_lon, mask)
+ else:
+ write_to_esmf_mesh(ofile, center_lat, center_lon, corner_lat, corner_lon, mask)
+
+if __name__== "__main__":
+ main(sys.argv[1:])
diff --git a/ush/cdeps_utils/hafs_ghrsst_download.py b/ush/cdeps_utils/hafs_ghrsst_download.py
new file mode 100755
index 000000000..9f8cded7b
--- /dev/null
+++ b/ush/cdeps_utils/hafs_ghrsst_download.py
@@ -0,0 +1,223 @@
+#! /usr/bin/env python3
+
+# This next line will abort in any version earlier than Python 3.6:
+f'This script requires Python 3.6 or newer.'
+
+import time
+import subprocess
+import contextlib
+import os
+import tempfile
+import getopt
+import re
+import logging
+import datetime
+import sys
+import random
+
+try:
+ import requests
+except ImportError as ie:
+ sys.stderr.write("""You are missing the "requests" module!
+You must install it to run this script.
+
+ pip install requests --user
+""")
+ exit(2)
+
+import produtil.setup, produtil.fileop, produtil.locking
+
+# Constants
+UTILITY_NAME = 'hafs_ghrsst_download'
+VERSION_STRING = '0.0.1'
+LOGGING_DOMAIN = UTILITY_NAME
+CYCLING_INTERVAL = datetime.timedelta(seconds=3600*24)
+EPSILON = datetime.timedelta(seconds=5) # epsilon for time comparison: five seconds
+
+# Non-constant globals:
+filename_format="JPL-L4_GHRSST-SSTfnd-MUR-GLOB-%Y%m%d"
+dayset=set() # set of YYYYMMDD strings
+happy=True # False = something failed
+url_after_change='https://www.ncei.noaa.gov/data/oceans/ghrsst/L4/GLOB/JPL/MUR25/%Y/%j/%Y%m%d090000-JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1.nc'
+url_before_change='https://www.ncei.noaa.gov/data/oceans/ghrsst/L4/GLOB/JPL/MUR25/%Y/%j/%Y%m%d-JPL-L4UHfnd-GLOB-v01-fv04-MUR.nc.bz2'
+date_url_changed=datetime.datetime.strptime("2009309","%Y%j")
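+# The archive naming changed on day 309 of 2009 (5 Nov 2009); files from
+# before that date use the older name and are bzip2-compressed (.nc.bz2).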
+block_size=65536
+
+def usage(why=None):
+ print(f'''Synopsis: {UTILITY_NAME} [options] day [day [...]]
+
+Downloads the listed days of data. Days can be specified as:
+ 20210815 = specify one day: August 15, 2021
+ 20210815-20210819 = specify a range of days: August 15th to 19th, 2021
+ 2018 = specify an entire year (2018)
+
+Options:
+ -q | --quiet = log only warnings and errors
+ -v | --verbose = log all messages
+ -F format | --format format = filename format as in strftime(3)
+ -b N | --block-size N = bytes to download in each block (default {block_size})
+ --version = print {UTILITY_NAME} {VERSION_STRING}
+ --help = this message
+
+Format example: stuffnthings_%Y%m%d = stuffnthings_20210815
+Script will automatically append ".nc" or ".nc.bz2"
+''')
+ if why:
+ sys.stderr.write(f'SCRIPT IS ABORTING BECAUSE: {why}\n')
+ return 1
+ return 0
+
+def quiet_remove(filename):
+ with contextlib.suppress(FileNotFoundError):
+ os.remove(filename)
+
+class RequestFailed(Exception):
+ def __init__(self,url,code):
+ self.url=str(url)
+ self.code=code
+ def __str__(self):
+ return f'requests.get("{self.url!s}") failed with code {self.code!s}'
+ def __repr__(self):
+ return f'RequestFailed({self.url!r},{self.code!r})'
+
+# The meat of the program: retrieve a file
+def download_one_day(when):
+ filename_base = when.strftime(filename_format)
+ filename_lock = filename_base+'.lock'
+
+ if when>=date_url_changed:
+ url=when.strftime(url_after_change)
+ else:
+ url=when.strftime(url_before_change)
+
+ if url.endswith('.bz2'):
+ filename_download = filename_base+'.download.nc.bz2'
+ filename_final = filename_base+'.nc.bz2'
+ else:
+ filename_download = filename_base+'.download.nc'
+ filename_final = filename_base+'.nc'
+
+ if os.path.exists(filename_final):
+ logger.info(filename_final+': already exists. Skipping.')
+ return True
+
+ request = None
+
+ with produtil.locking.LockFile(filename_lock,logger):
+ try:
+ if os.path.exists(filename_final):
+ logger.info(filename_final+': already exists (after lock). Skipping. Will pick a random date to avoid lock contention..')
+ return False
+ with open(filename_download,'wb') as downloaded:
+ logger.info(filename_download+' <-- '+str(url))
+ request = requests.get(url)
+ if request.status_code!=200:
+ raise RequestFailed(url,request.status_code)
+ for chunk in request.iter_content(block_size):
+ downloaded.write(chunk)
+ request.close()
+ produtil.fileop.deliver_file(filename_download,filename_final,logger=logger,
+ keep=False,verify=False,moveok=True,force=True)
+ quiet_remove(filename_download)
+ quiet_remove(filename_lock)
+ except Exception as e:
+ quiet_remove(filename_download)
+ if request is not None:
+ request.close()
+ raise e
+ return True
+
+# Parse arguments and initialize logging:
+log_level = logging.INFO
+optlist,args = getopt.getopt(sys.argv[1:],'qvb:F:',[
+ 'version','help','verbose','quiet','block-size=','format='])
+if len(args)<1:
+ exit(usage("No arguments provided!"))
+for optarg in optlist:
+ if optarg[0] in ['-q', '--quiet']:
+ log_level = logging.WARNING
+ elif optarg[0] in ['-v', '--verbose']:
+ log_level = logging.DEBUG
+ elif optarg[0] in ['-F', '--format']:
+ filename_format = optarg[1]
+ elif optarg[0] in ['-b', '--block-size' ]:
+ block_size=max(1,int(optarg[1]))
+ elif optarg[0]=='--help':
+ exit(usage())
+ elif optarg[0]=='--version':
+ print(UTILITY_NAME+' '+VERSION_STRING)
+ exit(0)
+logger = logging.getLogger(LOGGING_DOMAIN)
+produtil.setup.setup(level=log_level,send_dbn=False)
+
+# Parse the days. This loop was modified from run_hafs.py:
+for arg in args:
+ if re.match(r'\A\d{8}\Z',arg):
+ logger.info('single date/time')
+ # Single date/time
+ dayset.add(arg)
+ elif re.match(r'\A\d{4}\Z',arg):
+ logger.info('year')
+ # Year
+ start=datetime.datetime(int(arg,10),1,1,0,0,0)
+ end=datetime.datetime(int(arg,10),12,31,23,59,0)
+ now=start
+ while now=len(daylist):
+ logger.info(f'{day}: sleep for a little while... 30 second snooze...')
+ time.sleep(30)
+ logger.info(f'{day}: done sleeping.')
+ iloop=0
+ except Exception as ex: # Unfortunately, cdsapi raises Exception
+ happy = False
+ logger.error(f'Download failed for {day}: {ex}',exc_info=ex)
+
+# Exit 0 on success, 1 on failure:
+exit( 0 if happy else 1 )
diff --git a/ush/cdeps_utils/hafs_ghrsst_prep.sh b/ush/cdeps_utils/hafs_ghrsst_prep.sh
new file mode 100755
index 000000000..50d436f26
--- /dev/null
+++ b/ush/cdeps_utils/hafs_ghrsst_prep.sh
@@ -0,0 +1,89 @@
+#!/bin/bash
+
+set -xe
+
+set -u
+output_path="$1"
+set +u
+
+if ( ! which cdo ) ; then
+ echo "The \"cdo\" command isn't in your path! Go find it and rerun this job." 1>&2
+ exit 1
+fi
+
+if ( ! which ncks ) ; then
+ echo "The \"ncks\" command from the NetCDF Data Operators (nco) is not in your path! Go find the nco and rerun this job." 1>&2
+ exit 1
+fi
+
+HOMEhafs=${HOMEhafs:-/gpfs/hps3/emc/hwrf/noscrub/${USER}/save/HAFS}
+WORKhafs=${WORKhafs:-/gpfs/hps3/ptmp/${USER}/${SUBEXPT}/${CDATE}/${STORMID}}
+USHhafs=${USHhafs:-${HOMEhafs}/ush}
+CDATE=${CDATE:-${YMDH}}
+
+# Start & end times are at day precision, not hour
+m1date=$( date -d "${CDATE:0:4}-${CDATE:4:2}-${CDATE:6:2}t${CDATE:8:2}:00:00+00 -24 hours" +%Y%m%d )
+p1date=$( date -d "${CDATE:0:4}-${CDATE:4:2}-${CDATE:6:2}t${CDATE:8:2}:00:00+00 +$(( NHRS+24 )) hours" +%Y%m%d )
+now=$m1date
+end=$p1date
+
+set +x
+echo "Generating ESMF mesh from GHRSST files."
+echo "Running in dir \"$PWD\""
+echo "Using files from \"$DOCNdir\""
+echo "GHRSST Date range is $now to $end"
+set -x
+
+# Generate the filenames.
+missing=''
+usefiles=''
+itime=0
+infinity=9999 # infinite loop guard
+while (( now <= end && itime < infinity )) ; do
+ infile="$DOCNdir/JPL-L4_GHRSST-SSTfnd-MUR-GLOB-${now:0:8}.nc"
+ if [[ ! -s "$infile" || ! -r "$infile" ]] ; then
+ echo "GHRSST input file is missing: $infile" 2>&1
+ missing="$missing $infile"
+ else
+ usefiles="$mergefiles $infile"
+
+ # Discard all vars except what we need; convert to NetCDF3:
+ rm -f vars.nc
+ ncks -v time,lat,lon,analysed_sst -6 "$infile" vars.nc
+
+ # Subset data over the HAFS region (lon -118 to -5, lat -15 to 60)
+ rm -f subset.nc
+ cdo -sellonlatbox,-118,-5,-15.0,60.0 vars.nc subset.nc
+
+ # Convert temperature units:
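+ # (shifting add_offset down by 273.15 makes the packed analysed_sst
+ # unpack to degrees Celsius instead of Kelvin, without rewriting data)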
+ aos_old=`ncdump -c subset.nc | grep add_offset | grep analysed_sst | awk '{print $3}'`
+ aos_new=$(echo "scale=3; $aos_old-273.15" | bc)
+ ncatted -O -a add_offset,analysed_sst,o,f,"$aos_new" subset.nc
+
+ outfile=$( printf "%s/DOCN_input_%05d.nc" "$output_path" $itime )
+ $USHhafs/produtil_deliver.py -m subset.nc "$outfile"
+ fi
+ now=$( date -d "${now:0:4}-${now:4:2}-${now:6:2}t00:00:00+00 +24 hours" +%Y%m%d )
+ itime=$(( itime+1 ))
+done
+if (( itime >= infinity )) ; then
+ echo "Infinite loop detected! The \"date\" command did not behave as expected. Aborting!" 1>&2
+ exit 1
+fi
+
+if [[ "${missing:-}Q" != Q ]] ; then
+ set +x
+ echo "You are missing some GHRSST input files!"
+ for infile in $missing ; do
+ echo " missing: $infile"
+ done
+ echo " -> SCRIPT IS ABORTING BECAUSE INPUT FILES ARE MISSING <- "
+ exit 1
+fi
+
+#ncks -O -d lon,6999,17499 -d lat,8999,13999 "$merged" ghrsst_v1.nc
+
+# Rejoice.
+set +x
+echo "Successfully subsetted and corrected units of GHRSST files."
+echo "Please enjoy your files and have a nice day."
diff --git a/ush/cdeps_utils/hafs_oisst_download.py b/ush/cdeps_utils/hafs_oisst_download.py
new file mode 100755
index 000000000..cd1afca99
--- /dev/null
+++ b/ush/cdeps_utils/hafs_oisst_download.py
@@ -0,0 +1,217 @@
+#! /usr/bin/env python3
+
+# This next line will abort in any version earlier than Python 3.6:
+f'This script requires Python 3.6 or newer.'
+
+import time
+import subprocess
+import contextlib
+import os
+import tempfile
+import getopt
+import re
+import logging
+import datetime
+import sys
+import random
+
+try:
+ import requests
+except ImportError as ie:
+ sys.stderr.write("""You are missing the "requests" module!
+You must install it to run this script.
+
+ pip install requests --user
+""")
+ exit(2)
+
+import produtil.setup, produtil.fileop, produtil.locking
+
+# Constants
+UTILITY_NAME = 'hafs_oisst_download'
+VERSION_STRING = '0.0.1'
+LOGGING_DOMAIN = UTILITY_NAME
+CYCLING_INTERVAL = datetime.timedelta(seconds=3600*24)
+EPSILON = datetime.timedelta(seconds=5) # epsilon for time comparison: five seconds
+
+# Non-constant globals:
+dayset=set() # set of YYYYMMDD strings
+happy=True # False = something failed
+filename_format = 'oisst-avhrr-v02r01.%Y%m%d'
+base_url='https://www.ncei.noaa.gov/data/sea-surface-temperature-optimum-interpolation/v2.1/access/avhrr'
+block_size=65536
+
+def usage(why=None):
+ print(f'''Synopsis: {UTILITY_NAME} [options] day [day [...]]
+
+Downloads the listed days of data. Days can be specified as:
+ 20210815 = specify one day: August 15, 2021
+ 20210815-20210819 = specify a range of days: August 15th to 19th, 2021
+ 2018 = specify an entire year (2018)
+
+Options:
+ -q | --quiet = log only warnings and errors
+ -v | --verbose = log all messages
+ -u https:... | --url https:... = base url with no ending /
+ default: {base_url}
+ -F format | --format format = filename format as in strftime(3)
+ -b N | --block-size N = bytes to download in each block (default {block_size})
+ --version = print {UTILITY_NAME} {VERSION_STRING}
+ --help = this message
+
+Format example: stuffnthings_%Y%m%d = stuffnthings_20210815
+Script will automatically append ".nc"
+''')
+ if why:
+ sys.stderr.write(f'SCRIPT IS ABORTING BECAUSE: {why}\n')
+ return 1
+ return 0
+
+def quiet_remove(filename):
+ with contextlib.suppress(FileNotFoundError):
+ os.remove(filename)
+
+class RequestFailed(Exception):
+ def __init__(self,url,code):
+ self.url=str(url)
+ self.code=code
+ def __str__(self):
+ return f'requests.get("{self.url!s}") failed with code {self.code!s}'
+ def __repr__(self):
+ return f'RequestFailed({self.url!r},{self.code!r})'
+
+# The meat of the program: retrieve a file
+def download_one_day(when):
+ filename_base = when.strftime(filename_format)
+ filename_final = filename_base+'.nc'
+ if os.path.exists(filename_final):
+ logger.info(filename_final+': already exists. Skipping.')
+ return True
+
+ filename_download = filename_base+'_download.nc'
+ filename_lock = filename_base+'.lock'
+ request = None
+ yyyymm="%04d%02d"%(when.year,when.month)
+ yyyymmdd="%04d%02d%02d"%(when.year,when.month,when.day)
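+ # OISST files are organized by month on the server:
+ # <base_url>/YYYYMM/oisst-avhrr-v02r01.YYYYMMDD.nc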
+ url=f'{base_url}/{yyyymm}/oisst-avhrr-v02r01.{yyyymmdd}.nc'
+ with produtil.locking.LockFile(filename_lock,logger):
+ try:
+ if os.path.exists(filename_final):
+ logger.info(filename_final+': already exists (after lock). Skipping. Will pick a random date to avoid lock contention..')
+ return False
+ with open(filename_download,'wb') as downloaded:
+ logger.info(filename_download+' <-- '+str(url))
+ request = requests.get(url)
+ if request.status_code!=200:
+ raise RequestFailed(url,request.status_code)
+ for chunk in request.iter_content(block_size):
+ downloaded.write(chunk)
+ request.close()
+ produtil.fileop.deliver_file(filename_download,filename_final,logger=logger,
+ keep=False,verify=False,moveok=True,force=True)
+ quiet_remove(filename_download)
+ quiet_remove(filename_lock)
+ except Exception as e:
+ quiet_remove(filename_download)
+ if request is not None:
+ request.close()
+ raise e
+ return True
+
+# Parse arguments and initialize logging:
+log_level = logging.INFO
+optlist,args = getopt.getopt(sys.argv[1:],'qveniu:b:F:',[
+ 'version','help','verbose','quiet','block-size=','url=','format='])
+if len(args)<1:
+ exit(usage("No arguments provided!"))
+for optarg in optlist:
+ if optarg[0] in ['-q', '--quiet']:
+ log_level = logging.WARNING
+ elif optarg[0] in ['-v', '--verbose']:
+ log_level = logging.DEBUG
+ elif optarg[0] in ['-F', '--format']:
+ filename_format = optarg[1]
+ elif optarg[0] in ['-u', '--url' ]:
+ base_url=optarg[1]
+ elif optarg[0] in ['-b', '--block-size' ]:
+ block_size=max(1,int(optarg[1]))
+ elif optarg[0]=='--help':
+ exit(usage())
+ elif optarg[0]=='--version':
+ print(UTILITY_NAME+' '+VERSION_STRING)
+ exit(0)
+logger = logging.getLogger(LOGGING_DOMAIN)
+
+produtil.setup.setup(level=log_level,send_dbn=False)
+
+# Parse the days. This loop was modified from run_hafs.py:
+for arg in args:
+ if re.match(r'\A\d{8}\Z',arg):
+ logger.info('single date/time')
+ # Single date/time
+ dayset.add(arg)
+ elif re.match(r'\A\d{4}\Z',arg):
+ logger.info('year')
+ # Year
+ start=datetime.datetime(int(arg,10),1,1,0,0,0)
+ end=datetime.datetime(int(arg,10),12,31,23,59,0)
+ now=start
+ while now=len(daylist):
+ logger.info(f'{day}: sleep for a little while... 30 second snooze...')
+ time.sleep(30)
+ logger.info(f'{day}: done sleeping.')
+ iloop=0
+ except Exception as ex: # Unfortunately, cdsapi raises Exception
+ happy = False
+ logger.error(f'Download failed for {day}: {ex}',exc_info=ex)
+
+# Exit 0 on success, 1 on failure:
+exit( 0 if happy else 1 )
diff --git a/ush/cdeps_utils/hafs_oisst_prep.sh b/ush/cdeps_utils/hafs_oisst_prep.sh
new file mode 100755
index 000000000..bca9f51ce
--- /dev/null
+++ b/ush/cdeps_utils/hafs_oisst_prep.sh
@@ -0,0 +1,104 @@
+#!/bin/bash
+
+set -xe
+
+set -u
+output_path="$1"
+set +u
+
+if ( ! which cdo ) ; then
+ set +x
+ echo "The \"cdo\" command isn't in your path! Go find it and rerun this job." 1>&2
+ set -x
+ exit 1
+fi
+
+if ( ! which ncwa ) ; then
+ set +x
+ echo "The \"ncwa\" command from the NetCDF Data Operators (nco) is not in your path! Go find the nco and rerun this job." 1>&2
+ set -x
+ exit 1
+fi
+
+HOMEhafs=${HOMEhafs:-/gpfs/hps3/emc/hwrf/noscrub/${USER}/save/HAFS}
+WORKhafs=${WORKhafs:-/gpfs/hps3/ptmp/${USER}/${SUBEXPT}/${CDATE}/${STORMID}}
+USHhafs=${USHhafs:-${HOMEhafs}/ush}
+CDATE=${CDATE:-${YMDH}}
+
+set -u
+
+# Start & end times are at day precision, not hour
+m1date=$( date -d "${CDATE:0:4}-${CDATE:4:2}-${CDATE:6:2}t${CDATE:8:2}:00:00+00 -24 hours" +%Y%m%d )
+p1date=$( date -d "${CDATE:0:4}-${CDATE:4:2}-${CDATE:6:2}t${CDATE:8:2}:00:00+00 +$(( NHRS+24 )) hours" +%Y%m%d )
+now=$m1date
+end=$p1date
+merged=oisst-avhrr-v02r01.merged.nc
+nozlev=oisst-avhrr-v02r01.nozlev.nc
+outfile=$( printf "%s/DOCN_input_%05d.nc" "$output_path" 0 )
+
+set +x
+echo "Generating ESMF mesh from OISST files."
+echo "Running in dir \"$PWD\""
+echo "OISST Date range is $now to $end"
+set -x
+
+rm -f DOCN_input* merged.nc
+
+# Generate the filenames.
+usefiles=''
+missing=''
+itime=0
+infinity=9999 # infinite loop guard
+while (( now <= end && itime < infinity )) ; do
+ infile="$DOCNdir/oisst-avhrr-v02r01.${now:0:8}.nc"
+ if [[ ! -s "$infile" || ! -r "$infile" ]] ; then
+ echo "OISST input file is missing: $infile" 2>&1
+ missing="$missing $infile"
+ else
+ usefiles="$usefiles $infile"
+ fi
+ now=$( date -d "${now:0:4}-${now:4:2}-${now:6:2}t00:00:00+00 +24 hours" +%Y%m%d )
+ itime=$(( itime+1 ))
+done
+if (( itime >= infinity )) ; then
+ echo "Infinite loop detected! The \"date\" command did not behave as expected. Aborting!" 1>&2
+ exit 1
+fi
+
+if [[ "${missing:-}Q" != Q ]] ; then
+ set +x
+ echo "You are missing some OISST input files!"
+ for infile in $missing ; do
+ echo " missing: $infile"
+ done
+ echo " -> SCRIPT IS ABORTING BECAUSE INPUT FILES ARE MISSING <- "
+ exit 1
+fi
+
+set +x
+echo "OISST input files are:"
+for f in $usefiles ; do
+ echo " - $f"
+done
+echo "Will merge oisst files into $merged"
+echo "Will remove z levels into $nozlev"
+set -x
+
+# Merge all oisst files into one, as expected by hafs_esmf_mesh.py
+cdo mergetime $usefiles "$merged"
+test -s "$merged"
+test -r "$merged"
+
+# Remove z dimension
+ncwa -O -a zlev "$merged" "$nozlev"
+test -s "$nozlev"
+test -r "$nozlev"
+
+# Deliver to intercom:
+$USHhafs/produtil_deliver.py -m "$nozlev" "$outfile"
+
+# Rejoice.
+set +x
+echo "Successfully merged OISST files and removed z dimension."
+echo "Merged file is at: $outfile"
+echo "Please enjoy your files and have a nice day."
diff --git a/ush/cdeps_utils/hafs_rtofs_download.py b/ush/cdeps_utils/hafs_rtofs_download.py
new file mode 100755
index 000000000..0dde0a3e4
--- /dev/null
+++ b/ush/cdeps_utils/hafs_rtofs_download.py
@@ -0,0 +1,230 @@
+#! /usr/bin/env python3
+
+# This next line will abort in any version earlier than Python 3.6:
+f'This script requires Python 3.6 or newer.'
+
+import time
+import subprocess
+import contextlib
+import os
+import tempfile
+import getopt
+import re
+import logging
+import datetime
+import sys
+import random
+
+try:
+ import requests
+except ImportError as ie:
+ sys.stderr.write("""You are missing the "requests" module!
+You must install it to run this script.
+
+ pip install requests --user
+""")
+ exit(2)
+
+import produtil.setup, produtil.fileop, produtil.locking
+
+# Constants
+UTILITY_NAME = 'hafs_rtofs_download'
+VERSION_STRING = '0.0.1'
+LOGGING_DOMAIN = UTILITY_NAME
+CYCLING_INTERVAL = datetime.timedelta(seconds=3600*24)
+EPSILON = datetime.timedelta(seconds=5) # epsilon for time comparison: five seconds
+
+# Non-constant globals:
+dayset=set() # set of YYYYMMDD strings
+happy=True # False = something failed
+filename_format = 'rtofs_glo_2ds_%Y%m%d_f{fhour:03d}'
+url_format='https://nomads.ncep.noaa.gov/pub/data/nccf/com/rtofs/prod/rtofs.%Y%m%d/rtofs_glo_2ds_f{fhour:03d}_prog.nc'
+block_size=65536
+last_fhour=126
+fhour_interval=3
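+# Forecast hours to retrieve from each RTOFS cycle: every fhour_interval
+# hours from 0 through last_fhour.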
+
+def usage(why=None):
+ print(f'''Synopsis: {UTILITY_NAME} [options] day [day [...]]
+
+Downloads the listed days of data. Days can be specified as:
+ 20210815 = specify one day: August 15, 2021
+ 20210815-20210819 = specify a range of days: August 15th to 19th, 2021
+ 2018 = specify an entire year (2018)
+
+Options:
+ -q | --quiet = log only warnings and errors
+ -v | --verbose = log all messages
+ -u format | --url format = URL format string as in strftime(3)
+ default: {url_format}
+ -F format | --format format = filename format as in strftime(3)
+ -b N | --block-size N = bytes to download in each block (default {block_size})
+ --version = print {UTILITY_NAME} {VERSION_STRING}
+ --help = this message
+
+Format example: stuffnthings_%Y%m%d = stuffnthings_20210815
+Script will automatically append ".nc"
+''')
+
+ if why:
+ sys.stderr.write(f'SCRIPT IS ABORTING BECAUSE: {why}\n')
+ return 1
+ return 0
+
+def quiet_remove(filename):
+ with contextlib.suppress(FileNotFoundError):
+ os.remove(filename)
+
+class RequestFailed(Exception):
+ def __init__(self,url,code):
+ self.url=str(url)
+ self.code=code
+ def __str__(self):
+ return f'requests.get("{self.url!s}") failed with code {self.code!s}'
+ def __repr__(self):
+ return f'RequestFailed({self.url!r},{self.code!r})'
+
+# The meat of the program: retrieve a file
+def download_one_hour(when,fhour):
+ filename_base = when.strftime(filename_format.format(fhour=fhour))
+ url = when.strftime(url_format.format(fhour=fhour))
+ filename_final = filename_base+'.nc'
+ if os.path.exists(filename_final):
+ logger.info(filename_final+': already exists. Skipping.')
+ return True
+
+ filename_download = filename_base+'_download.nc'
+ filename_lock = filename_base+'.lock'
+ request = None
+ with produtil.locking.LockFile(filename_lock,logger):
+ try:
+ if os.path.exists(filename_final):
+ logger.info(filename_final+': already exists (after lock). Skipping. Will pick a random date to avoid lock contention..')
+ return False
+ with open(filename_download,'wb') as downloaded:
+ logger.info(filename_download+' <-- '+str(url))
+ request = requests.get(url)
+ if request.status_code!=200:
+ raise RequestFailed(url,request.status_code)
+ for chunk in request.iter_content(block_size):
+ downloaded.write(chunk)
+ request.close()
+ produtil.fileop.deliver_file(filename_download,filename_final,logger=logger,
+ keep=False,verify=False,moveok=True,force=True)
+ quiet_remove(filename_download)
+ quiet_remove(filename_lock)
+ except Exception as e:
+ quiet_remove(filename_download)
+ if request is not None:
+ request.close()
+ raise e
+ return True
+# Parse arguments and initialize logging:
+log_level = logging.INFO
+optlist,args = getopt.getopt(sys.argv[1:],'qveniu:b:F:',[
+ 'version','help','verbose','quiet','block-size=','url=','format='])
+if len(args)<1:
+ exit(usage("No arguments provided!"))
+for optarg in optlist:
+ if optarg[0] in ['-q', '--quiet']:
+ log_level = logging.WARNING
+ elif optarg[0] in ['-v', '--verbose']:
+ log_level = logging.DEBUG
+ elif optarg[0] in ['-F', '--format']:
+ filename_format = optarg[1]
+ elif optarg[0] in ['-u', '--url' ]:
+ url_format=optarg[1]
+ elif optarg[0] in ['-b', '--block-size' ]:
+ block_size=max(1,int(optarg[1]))
+ elif optarg[0]=='--help':
+ exit(usage())
+ elif optarg[0]=='--version':
+ print(UTILITY_NAME+' '+VERSION_STRING)
+ exit(0)
+logger = logging.getLogger(LOGGING_DOMAIN)
+
+produtil.setup.setup(level=log_level,send_dbn=False)
+
+# Parse the days. This loop was modified from run_hafs.py:
+for arg in args:
+ if re.match(r'\A\d{8}\Z',arg):
+ logger.info('single date/time')
+ # Single date/time
+ dayset.add(arg)
+ elif re.match(r'\A\d{4}\Z',arg):
+ logger.info('year')
+ # Year
+ start=datetime.datetime(int(arg,10),1,1,0,0,0)
+ end=datetime.datetime(int(arg,10),12,31,23,59,0)
+ now=start
+ while now=len(hourlist):
+ logger.info(f'{day}: sleep for a little while... 30 second snooze...')
+ time.sleep(30)
+ logger.info(f'{day}: done sleeping.')
+ iloop=0
+ except Exception as ex: # Unfortunately, cdsapi raises Exception
+ happy = False
+ logger.error(f'Download failed for {day}: {ex}',exc_info=ex)
+
+# Exit 0 on success, 1 on failure:
+exit( 0 if happy else 1 )
diff --git a/ush/cdeps_utils/hafs_rtofs_prep.sh b/ush/cdeps_utils/hafs_rtofs_prep.sh
new file mode 100644
index 000000000..e31515af0
--- /dev/null
+++ b/ush/cdeps_utils/hafs_rtofs_prep.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+
+set -xe
+
+merged="${1:-merged.nc}"
+
+for exe in cdo ncks ncrename ncap2 ; do
+ if ( ! which "$exe" ) ; then
+ echo "The \"$exe\" command isn't in your path! Go find it and rerun this job." 1>&2
+ exit 1
+ fi
+done
+
+HOMEhafs=${HOMEhafs:-/gpfs/hps3/emc/hwrf/noscrub/${USER}/save/HAFS}
+WORKhafs=${WORKhafs:-/gpfs/hps3/ptmp/${USER}/${SUBEXPT}/${CDATE}/${STORMID}}
+USHhafs=${USHhafs:-${HOMEhafs}/ush}
+CDATE=${CDATE:-${YMDH}}
+
+mesh_ocn="$mesh_ocn"
+mesh_dir=$( dirname "$mesh_ocn" )
+
+# Start & end times are at day precision, not hour
+m1date=$( date -d "${CDATE:0:4}-${CDATE:4:2}-${CDATE:6:2}t${CDATE:8:2}:00:00+00 -24 hours" +%Y%m%d )
+p1date=$( date -d "${CDATE:0:4}-${CDATE:4:2}-${CDATE:6:2}t${CDATE:8:2}:00:00+00 +$(( NHRS+24 )) hours" +%Y%m%d )
+now=$m1date
+end=$p1date
+
+set +x
+echo "Generating ESMF mesh from RTOFS files."
+echo "Running in dir \"$PWD\""
+echo "Using files from \"$DOCNdir\""
+echo "Running in dir \"$PWD\""
+set -x
+
+files_to_merge=''
+missing=''
+
+for fhour in $( seq 0 3 $NHRS ) ; do
+ infile="$DOCNdir/rtofs_glo_2ds_${CDATE:0:8}_f"$( printf %03d $fhour ).nc
+ if [[ ! -s "$infile" || ! -r "$infile" ]] ; then
+ echo "RTOFS input file is missing: $infile" 2>&1
+ missing="$missing $infile"
+ else
+ files_to_merge="$files_to_merge $infile"
+ fi
+done
+
+if [[ "${missing:-}Q" != Q ]] ; then
+ set +x
+ echo "You are missing some RTOFS input files!"
+ for infile in $missing ; do
+ echo " missing: $infile"
+ done
+ echo " -> SCRIPT IS ABORTING BECAUSE INPUT FILES ARE MISSING <- "
+ exit 1
+fi
+
+set +x
+echo "RTOFS input files are:"
+for infile in $files_to_merge ; do
+ echo " - $infile"
+done
+echo "Will merge RTOFS files into $merged"
+set -x
+
+# Merge files
+cdo mergetime $files_to_merge rtofs_glo_2ds_prog.nc
+
+# Subset data over the HAFS region (lon 250 to 355.0732, lat 0 to 50.0108)
+ncks -O -d X,2198,3511 -d Y,1504,2228 rtofs_glo_2ds_prog.nc rtofs_glo_2ds_prog_v1.nc
+
+# Keep only required variables
+ncks -O -v sst,Latitude,Longitude rtofs_glo_2ds_prog_v1.nc rtofs_glo_2ds_prog_v2.nc
+rm -f rtofs_glo_2ds_prog_v1.nc
+ncks -O -C -x -v Date rtofs_glo_2ds_prog_v2.nc rtofs_glo_2ds_prog_v3.nc
+rm -f rtofs_glo_2ds_prog_v2.nc
+
+# Rename variables to make them CDEPS compatible.
+# We have a workaround in here because I once tried to change a
+# variable name "MT" to "time" in the data, and it was set to the
+# missing value. This could be a bug in the NetCDF Operators, but the
+# following code produces a correct file:
+ncrename -d MT,time rtofs_glo_2ds_prog_v3.nc
+ncap2 -O -v -s 'time=MT' rtofs_glo_2ds_prog_v3.nc rtofs_glo_2ds_prog_v4.nc
+rm -f rtofs_glo_2ds_prog_v3.nc
+ncks -A -C -v time rtofs_glo_2ds_prog_v4.nc rtofs_glo_2ds_prog_v5.nc
+rm -f rtofs_glo_2ds_prog_v4.nc
+ncks -x -v MT rtofs_glo_2ds_prog_v5.nc "$merged"
+rm -f rtofs_glo_2ds_prog_v5.nc
+
+# Rejoice.
+set +x
+echo "RTOFS files successfully merged."
diff --git a/ush/cdeps_utils/produtil b/ush/cdeps_utils/produtil
new file mode 120000
index 000000000..f6a8dbf85
--- /dev/null
+++ b/ush/cdeps_utils/produtil
@@ -0,0 +1 @@
+../produtil/
\ No newline at end of file
diff --git a/ush/hafs/exceptions.py b/ush/hafs/exceptions.py
index 52ecb92e3..b56d4edcf 100644
--- a/ush/hafs/exceptions.py
+++ b/ush/hafs/exceptions.py
@@ -122,6 +122,9 @@ class HAFSFixInsane(HAFSSanityError):
class HAFSArchiveInsane(HAFSSanityError):
"""!Raised when the sanity check of the HAFS archiving settings
fails."""
+class HAFSDataModelInsane(HAFSSanityError):
+ """!Raised when the sanity check of the HAFS data model settings
+ fails."""
########################################################################
# OCEAN AND WAVE EXCEPTIONS
diff --git a/ush/hafs/launcher.py b/ush/hafs/launcher.py
index f9d5ff95c..acd762515 100644
--- a/ush/hafs/launcher.py
+++ b/ush/hafs/launcher.py
@@ -558,6 +558,7 @@ def launch(file_list,cycle,stid,moreopt,case_root,init_dirs=True,
%(section,option,repr(value)))
conf.set(section,option,value)
conf.guess_default_values()
+ conf.set_data_model_variables()
cycling_interval=conf.getfloat('config','cycling_interval',6.0)
cycling_interval=-abs(cycling_interval*3600.0)
if cycle is not None:
@@ -788,6 +789,81 @@ def read_tcvitals_and_messages(self,vitdir=None,vitpattern=None,
revital.readfiles(inputs,raise_all=False)
return revital
+ def timeless_sanity_check_data_models(self,logger):
+ """!In the hafs_launcher job, this checks the data model variables and
+ files for obvious errors on the command line, before submitting jobs."""
+ run_datm=self.getbool('config','run_datm',False)
+ run_docn=self.getbool('config','run_docn',False)
+ run_ocean=self.getbool('config','run_ocean',False)
+ if run_datm and run_docn:
+ msg='run_datm and run_docn cannot both be set to yes'
+ logger.error(msg)
+ raise HAFSDataModelInsane(msg)
+ if run_docn and run_ocean:
+ msg='run_docn and run_ocean cannot both be set to yes'
+ logger.error(msg)
+ raise HAFSDataModelInsane(msg)
+ if run_datm and not run_ocean:
+ msg='run_datm is useless without run_ocean'
+ logger.error(msg)
+ raise HAFSDataModelInsane(msg)
+
+ def sanity_check_data_models(self,logger):
+ """!In the hafs_launcher job, this checks the data model variables and
+ files for obvious errors before starting the rest of the workflow."""
+ run_datm=self.getbool('config','run_datm',False)
+ run_docn=self.getbool('config','run_docn',False)
+ run_ocean=self.getbool('config','run_ocean',False)
+ if run_datm:
+ if run_docn:
+ msg='run_datm and run_docn cannot both be set to yes'
+ logger.error(msg)
+ raise HAFSDataModelInsane(msg)
+ make_mesh_atm=self.getbool('config','make_mesh_atm',True)
+ if not make_mesh_atm:
+ mesh_atm=self.getstr('forecast','mesh_atm')
+ if not os.path.exists(mesh_atm):
+ msg='%s: mesh_atm file does not exist'%(mesh_atm,)
+ logger.error(msg)
+ raise HAFSDataModelInsane(msg)
+ else:
+ logger.info("%s: will use this pre-made datm esmf mesh (mesh_atm)."%(mesh_atm,))
+ if run_docn:
+ make_mesh_ocn=self.getbool('config','make_mesh_ocn',True)
+ if not make_mesh_ocn:
+ mesh_ocn=self.getstr('forecast','mesh_ocn')
+ if not os.path.exists(mesh_ocn):
+ msg='%s: mesh_ocn file does not exist'%(mesh_ocn,)
+ logger.error(msg)
+ raise HAFSDataModelInsane(msg)
+ else:
+ logger.info("%s: will use this pre-made docn esmf mesh (mesh_ocn)."%(mesh_ocn,))
+ if run_ocean:
+ msg='run_ocean=yes and run_docn=yes are incompatible.'
+ logger.error(msg)
+ raise HAFSDataModelInsane(msg)
+
+ def set_data_model_variables(self):
+ """!Sets conf variables for the data models."""
+ run_datm=self.getbool('config','run_datm',False)
+ run_docn=self.getbool('config','run_docn',False)
+ if run_datm:
+ make_mesh_atm=self.getbool('config','make_mesh_atm',True)
+ if make_mesh_atm:
+ self.set('forecast','mesh_atm',self.getraw('forecast','mesh_atm_gen'))
+ else:
+ self.set('forecast','mesh_atm',self.getraw('forecast','mesh_atm_in'))
+ else:
+ self.set('forecast','mesh_atm','dummy.nc')
+ if run_docn:
+ make_mesh_ocn=self.getbool('config','make_mesh_ocn',True)
+ if make_mesh_ocn:
+ self.set('forecast','mesh_ocn',self.getraw('forecast','mesh_ocn_gen'))
+ else:
+ self.set('forecast','mesh_ocn',self.getraw('forecast','mesh_ocn_in'))
+ else:
+ self.set('forecast','mesh_ocn','dummy.nc')
+
def set_storm(self,syndat,oldsyndat):
"""!Sets the storm that is to be run.
@@ -1117,6 +1193,7 @@ def timeless_sanity_check(self,enset=None,logger=None):
'%s: not the same as the launcher.py that is running now '
'(%s) -- check your paths and EXPT.'%(checkme,myfile))
self.sanity_check_forecast_length(logger)
+ self.timeless_sanity_check_data_models(logger)
def sanity_check_forecast_length(self,logger=None):
"""!Ensures the forecast length is valid.
@@ -1204,6 +1281,7 @@ def sanity_check(self):
%(repr(case_root),))
self.sanity_check_archive(logger)
+ self.sanity_check_data_models(logger)
def guess_default_values(self):
"""!Tries to guess default values for many configuration settings.