From fc09992294bf1bcb6b2876942789797508505c70 Mon Sep 17 00:00:00 2001 From: Ben Trumbore Date: Mon, 20 May 2024 14:28:26 -0400 Subject: [PATCH 1/5] Update usecases.rst --- docs/Users_Guide/usecases.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/Users_Guide/usecases.rst b/docs/Users_Guide/usecases.rst index dfee7e0..af11bca 100644 --- a/docs/Users_Guide/usecases.rst +++ b/docs/Users_Guide/usecases.rst @@ -5,6 +5,9 @@ Use Cases Generic CONUS “interesting weather” =================================== +Hurricane Matthew Running on Jetstream2 +=================================== + Land Use/Land Cover Change ========================== From 54d8120e763b3e4e5220c670bd5c5e982ed4c559 Mon Sep 17 00:00:00 2001 From: Ben Trumbore Date: Mon, 20 May 2024 14:37:32 -0400 Subject: [PATCH 2/5] Roll back change to usecases.rst --- docs/Users_Guide/usecases.rst | 3 --- 1 file changed, 3 deletions(-) diff --git a/docs/Users_Guide/usecases.rst b/docs/Users_Guide/usecases.rst index af11bca..dfee7e0 100644 --- a/docs/Users_Guide/usecases.rst +++ b/docs/Users_Guide/usecases.rst @@ -5,9 +5,6 @@ Use Cases Generic CONUS “interesting weather” =================================== -Hurricane Matthew Running on Jetstream2 -=================================== - Land Use/Land Cover Change ========================== From 743b9b7f8cef60bb121e7fb2782931aa51804994 Mon Sep 17 00:00:00 2001 From: George McCabe <23407799+georgemccabe@users.noreply.github.com> Date: Tue, 21 May 2024 16:05:03 -0600 Subject: [PATCH 3/5] Feature #48 Hurricane Matthew METplus (#53) * per #48, create directory structure to store files needed for each use case example * per #48, add in-progress METplus config file * per #48, update use case config to read metar observations and compute statistompute statistics * Configure 2 instances of PointStat to process surface and upper air separately (until dtcenter/METplus#2567 is completed). Adjust settings to produce more useful output, prevent wind vector warnings, filter out duplicate obs, define pressure levels, etc. 
* change configuration to overwrite fcst level to avoid messy NetCDF level value and change output flag settings to get results that can be more easily plotted * change input directory paths to paths that are expected inside the container that are mounted -- left paths on casper for reference * process all forecast leads and pressure levels * change obs window to prevent observations from being matched with multiple model files * add file window to get multiple observation input files * Per #48, add instructions to run METplus use case on HPC using apptainer * update RTD documentation requirements to specify sphinx * add JetBrains files * add other requirements for documentation * change to consistent format as other entries * Reformat info about local and container paths that are mounted * add WRF configuration files * run MADIS2NC as part of the use case instead of using output from the tool * updated instructions to obtain obs data from artifact stored on DockerHub instead of local directory * updated instructions to use full paths instead of changing directories, added steps to clone i-wrf repo to get METplus config files * removed commented lines --- .idea/.gitignore | 12 ++ .idea/inspectionProfiles/Project_Default.xml | 14 ++ .../inspectionProfiles/profiles_settings.xml | 6 + .idea/misc.xml | 7 + .idea/vcs.xml | 6 + docs/Users_Guide/running.rst | 62 ++++++- docs/requirements.txt | 5 +- use_cases/Air_Quality/METplus/.gitignore | 0 .../Air_Quality/Visualization/.gitignore | 0 use_cases/Air_Quality/WRF/.gitignore | 0 .../Hurricane_Matthew/METplus/.gitignore | 0 .../METplus/PointStat_matthew.conf | 165 ++++++++++++++++++ .../Visualization/.gitignore | 0 use_cases/Hurricane_Matthew/WRF/.gitignore | 0 .../Hurricane_Matthew/WRF/namelist.input | 121 +++++++++++++ use_cases/Hurricane_Matthew/WRF/vars_io.txt | 11 ++ .../Land_Use_Land_Cover/METplus/.gitignore | 0 .../Visualization/.gitignore | 0 use_cases/Land_Use_Land_Cover/WRF/.gitignore | 0 use_cases/Renewable_Energy/METplus/.gitignore | 0 .../Renewable_Energy/Visualization/.gitignore | 0 use_cases/Renewable_Energy/WRF/.gitignore | 0 22 files changed, 406 insertions(+), 3 deletions(-) create mode 100644 .idea/.gitignore create mode 100644 .idea/inspectionProfiles/Project_Default.xml create mode 100644 .idea/inspectionProfiles/profiles_settings.xml create mode 100644 .idea/misc.xml create mode 100644 .idea/vcs.xml create mode 100644 use_cases/Air_Quality/METplus/.gitignore create mode 100644 use_cases/Air_Quality/Visualization/.gitignore create mode 100644 use_cases/Air_Quality/WRF/.gitignore create mode 100644 use_cases/Hurricane_Matthew/METplus/.gitignore create mode 100644 use_cases/Hurricane_Matthew/METplus/PointStat_matthew.conf create mode 100644 use_cases/Hurricane_Matthew/Visualization/.gitignore create mode 100644 use_cases/Hurricane_Matthew/WRF/.gitignore create mode 100644 use_cases/Hurricane_Matthew/WRF/namelist.input create mode 100644 use_cases/Hurricane_Matthew/WRF/vars_io.txt create mode 100644 use_cases/Land_Use_Land_Cover/METplus/.gitignore create mode 100644 use_cases/Land_Use_Land_Cover/Visualization/.gitignore create mode 100644 use_cases/Land_Use_Land_Cover/WRF/.gitignore create mode 100644 use_cases/Renewable_Energy/METplus/.gitignore create mode 100644 use_cases/Renewable_Energy/Visualization/.gitignore create mode 100644 use_cases/Renewable_Energy/WRF/.gitignore diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 index 0000000..ba693c2 --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,12 @@ +# Default 
ignored files +/shelf/ +/workspace.xml +# Editor-based HTTP Client requests +/httpRequests/ +# Datasource local storage ignored files +/dataSources/ +/dataSources.local.xml +/usage.statistics.xml +/sonarlint* +/*.iml +/modules.xml \ No newline at end of file diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml new file mode 100644 index 0000000..1bbf15d --- /dev/null +++ b/.idea/inspectionProfiles/Project_Default.xml @@ -0,0 +1,14 @@ + + + + \ No newline at end of file diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 0000000..105ce2d --- /dev/null +++ b/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 0000000..8d05fcd --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,7 @@ + + + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 0000000..35eb1dd --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/docs/Users_Guide/running.rst b/docs/Users_Guide/running.rst index 7a051bb..d0f6f9e 100644 --- a/docs/Users_Guide/running.rst +++ b/docs/Users_Guide/running.rst @@ -2,8 +2,8 @@ Running I-WRF Containers ************************ -On an HPC Cluster with Singularity -================================== +On an HPC Cluster with Apptainer +================================ WRF --- @@ -11,6 +11,64 @@ WRF METplus ------- +The following commands were run on Casper. + +Load the apptainer module:: + + module load apptainer + +Change directory to scratch and pull the containers from DockerHub. +This will create a `.sif` file in the current directory:: + + apptainer pull ${SCRATCH}/metplus-dev_develop.sif docker://dtcenter/metplus-dev:develop + apptainer pull ${SCRATCH}/data-matthew-input-obs.sif oras://registry-1.docker.io/ncar/iwrf:data-matthew-input-obs + +Create a directory to store the output:: + + mkdir ${SCRATCH}/metplus_out + +Clone the I-WRF GitHub repository to get the configuration files:: + + git clone https://github.com/NCAR/i-wrf ${SCRATCH}/i-wrf + +Set environment variable to bind directories to container +(note: this can also be accomplished by passing the value on the command line +using the --bind argument) + +* Input data directories for WRF, raob, and metar input data + * WRF: + * Local: /glade/derecho/scratch/jaredlee/nsf_i-wrf/matthew + * Container: /data/input/wrf + * RAOB: + * Local: From data-matthew-input-obs.sif + * Container: /data/input/obs/raob + * METAR: + * Local: From data-matthew-input-obs.sif + * Container: /data/input/obs/metar +* Config directory containing METplus use case configuration file + * Local: ${SCRATCH}/i-wrf/use_cases/Hurricane_Matthew/METplus + * Container: /config +* Output directory to write output + * Local: ${SCRATCH}/metplus_out + * Container: /data/output + +:: + + LOCAL_METPLUS_CONFIG_DIR=${SCRATCH}/i-wrf/use_cases/Hurricane_Matthew/METplus + LOCAL_FCST_INPUT_DIR=/glade/derecho/scratch/jaredlee/nsf_i-wrf/matthew + LOCAL_OUTPUT_DIR=${SCRATCH}/metplus_out + + export APPTAINER_BIND="${SCRATCH}/data-matthew-input-obs.sif:/data/input/obs:image-src=/,${LOCAL_METPLUS_CONFIG_DIR}:/config,${LOCAL_FCST_INPUT_DIR}:/data/input/wrf,${LOCAL_OUTPUT_DIR}:/data/output" + +Execute the run_metplus.py command inside the container to run the use case:: + + apptainer exec ${SCRATCH}/metplus-dev_develop.sif 
/metplus/METplus/ush/run_metplus.py /config/PointStat_matthew.conf + +Check that the output data was created locally:: + + ls ${SCRATCH}/metplus_out/point_stat -1 + + Visualization ------------- diff --git a/docs/requirements.txt b/docs/requirements.txt index 59572ff..d738588 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,3 +1,6 @@ -sphinx-gallery +sphinx==5.3.0 +sphinx-gallery==0.14.0 sphinxcontrib-bibtex sphinx-panels +sphinx-rtd-theme==1.3.0 +sphinx-design==0.3.0 diff --git a/use_cases/Air_Quality/METplus/.gitignore b/use_cases/Air_Quality/METplus/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/use_cases/Air_Quality/Visualization/.gitignore b/use_cases/Air_Quality/Visualization/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/use_cases/Air_Quality/WRF/.gitignore b/use_cases/Air_Quality/WRF/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/use_cases/Hurricane_Matthew/METplus/.gitignore b/use_cases/Hurricane_Matthew/METplus/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/use_cases/Hurricane_Matthew/METplus/PointStat_matthew.conf b/use_cases/Hurricane_Matthew/METplus/PointStat_matthew.conf new file mode 100644 index 0000000..fb96f9a --- /dev/null +++ b/use_cases/Hurricane_Matthew/METplus/PointStat_matthew.conf @@ -0,0 +1,165 @@ +[config] + +# For additional information, please see the METplus Users Guide. +# https://metplus.readthedocs.io/en/latest/Users_Guide + +### +# Processes to run +# https://metplus.readthedocs.io/en/latest/Users_Guide/systemconfiguration.html#process-list +### + +PROCESS_LIST = MADIS2NC(metar), MADIS2NC(raob), PointStat(surface), PointStat(upper_air) + + +### +# Time Info +# LOOP_BY options are INIT, VALID, RETRO, and REALTIME +# If set to INIT or RETRO: +# INIT_TIME_FMT, INIT_BEG, INIT_END, and INIT_INCREMENT must also be set +# If set to VALID or REALTIME: +# VALID_TIME_FMT, VALID_BEG, VALID_END, and VALID_INCREMENT must also be set +# LEAD_SEQ is the list of forecast leads to process +# https://metplus.readthedocs.io/en/latest/Users_Guide/systemconfiguration.html#timing-control +### + +LOOP_BY = INIT +INIT_TIME_FMT = %Y%m%d%H +INIT_BEG = 2016100600 +INIT_END = 2016100600 +INIT_INCREMENT = 6H + +LEAD_SEQ = begin_end_incr(0,48,3) + +[metar] +LEAD_SEQ = begin_end_incr(0,48,1) + +[raob] + +LEAD_SEQ = begin_end_incr(0,48,1) + +MADIS2NC_ALLOW_MISSING_INPUTS = True +MADIS2NC_MISSING_INPUT_THRESH = 0.875 + +[config] + +### +# File I/O +# https://metplus.readthedocs.io/en/latest/Users_Guide/systemconfiguration.html#directory-and-filename-template-info +### + +MADIS2NC_INPUT_DIR = /data/input/obs +MADIS2NC_INPUT_TEMPLATE = {instance}/{valid?fmt=%Y%m%d_%H%M} + +MADIS2NC_OUTPUT_DIR = {OUTPUT_BASE}/madis2nc +MADIS2NC_OUTPUT_TEMPLATE = {instance}/met_{valid?fmt=%Y%m%d_%H%M}.nc + +MADIS2NC_TYPE = {instance} + +FCST_POINT_STAT_INPUT_DIR = /data/input/wrf + +[surface] + +FCST_POINT_STAT_INPUT_TEMPLATE = {init?fmt=%Y%m%d_%H}/wrfout_d01_{valid?fmt=%Y-%m-%d_%H:%M:%S} + +OBS_POINT_STAT_INPUT_DIR = {MADIS2NC_OUTPUT_DIR}/metar +OBS_POINT_STAT_INPUT_TEMPLATE = met_{valid?fmt=%Y%m%d_%H%M}.nc + +POINT_STAT_MESSAGE_TYPE = ADPSFC + +FCST_VAR1_NAME = T2 +FCST_VAR1_LEVELS = "(0,*,*)" +FCST_VAR1_OPTIONS = set_attr_level = "Z2" + +OBS_VAR1_NAME = TMP +OBS_VAR1_LEVELS = Z2 + +FCST_VAR2_NAME = U10 +FCST_VAR2_LEVELS = "(0,*,*)" +FCST_VAR2_OPTIONS = set_attr_level = "Z10" + +OBS_VAR2_NAME = UGRD +OBS_VAR2_LEVELS = Z10 + +FCST_VAR3_NAME = V10 +FCST_VAR3_LEVELS = "(0,*,*)" +FCST_VAR3_OPTIONS = 
set_attr_level = "Z10" + +OBS_VAR3_NAME = VGRD +OBS_VAR3_LEVELS = Z10 + +OBS_POINT_STAT_WINDOW_BEGIN = -1799 +OBS_POINT_STAT_WINDOW_END = 1800 + +OBS_POINT_STAT_FILE_WINDOW_BEGIN = -1H + +[upper_air] + +FCST_POINT_STAT_INPUT_TEMPLATE = {init?fmt=%Y%m%d_%H}/wrfout_plev_d01_{valid?fmt=%Y-%m-%d_%H:%M:%S} + +OBS_POINT_STAT_INPUT_DIR = {MADIS2NC_OUTPUT_DIR}/raob +OBS_POINT_STAT_INPUT_TEMPLATE = met_{valid?fmt=%Y%m%d_%H%M}.nc + +POINT_STAT_MESSAGE_TYPE = ADPUPA + +# WRF pressure levels: +# 92500,85000,70000,50000,40000,30000,25000,20000,15000,10000 + +UPPER_AIR_FCST_LEVELS = "(0,@92500,*,*)", "(0,@85000,*,*)" , "(0,@70000,*,*)", "(0,@50000,*,*)", "(0,@40000,*,*)", "(0,@30000,*,*)", "(0,@25000,*,*)", "(0,@20000,*,*)", "(0,@15000,*,*)", "(0,@10000,*,*)" +UPPER_AIR_OBS_LEVELS = P925, P850, P700, P500, P400, P300, P250, P200, P150, P100 + +FCST_VAR1_NAME = T_PL +FCST_VAR1_LEVELS = {UPPER_AIR_FCST_LEVELS} + +OBS_VAR1_NAME = TMP +OBS_VAR1_LEVELS = {UPPER_AIR_OBS_LEVELS} + +FCST_VAR2_NAME = U_PL +FCST_VAR2_LEVELS = {UPPER_AIR_FCST_LEVELS} + +OBS_VAR2_NAME = UGRD +OBS_VAR2_LEVELS = {UPPER_AIR_OBS_LEVELS} + +FCST_VAR3_NAME = V_PL +FCST_VAR3_LEVELS = {UPPER_AIR_FCST_LEVELS} + +OBS_VAR3_NAME = VGRD +OBS_VAR3_LEVELS = {UPPER_AIR_OBS_LEVELS} + +OBS_POINT_STAT_WINDOW_BEGIN = -5399 +OBS_POINT_STAT_WINDOW_END = 5400 + +OBS_POINT_STAT_FILE_WINDOW_BEGIN = -2H +OBS_POINT_STAT_FILE_WINDOW_END = 2H + +[config] + +POINT_STAT_ONCE_PER_FIELD = False + +POINT_STAT_OUTPUT_DIR = {OUTPUT_BASE}/point_stat + +### +# PointStat Settings +# https://metplus.readthedocs.io/en/latest/Users_Guide/wrappers.html#pointstat +### + +POINT_STAT_MET_CONFIG_OVERRIDES = wind_thresh = [ >2 ]; wind_logic = INTERSECTION; + +POINT_STAT_DUPLICATE_FLAG = UNIQUE +POINT_STAT_OBS_SUMMARY = NEAREST + +POINT_STAT_INTERP_TYPE_METHOD = BILIN +POINT_STAT_INTERP_TYPE_WIDTH = 2 + +POINT_STAT_OUTPUT_FLAG_CNT = BOTH +POINT_STAT_OUTPUT_FLAG_SL1L2 = STAT +POINT_STAT_OUTPUT_FLAG_VL1L2 = STAT +POINT_STAT_OUTPUT_FLAG_VCNT = BOTH + +MODEL = WRF + +POINT_STAT_DESC = NA +OBTYPE = + +POINT_STAT_OUTPUT_PREFIX = {instance} + +POINT_STAT_MASK_GRID = FULL diff --git a/use_cases/Hurricane_Matthew/Visualization/.gitignore b/use_cases/Hurricane_Matthew/Visualization/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/use_cases/Hurricane_Matthew/WRF/.gitignore b/use_cases/Hurricane_Matthew/WRF/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/use_cases/Hurricane_Matthew/WRF/namelist.input b/use_cases/Hurricane_Matthew/WRF/namelist.input new file mode 100644 index 0000000..d38b541 --- /dev/null +++ b/use_cases/Hurricane_Matthew/WRF/namelist.input @@ -0,0 +1,121 @@ + &time_control + run_days = 0, + run_hours = 48, + run_minutes = 0, + run_seconds = 0, + start_year = 2016, 2019, + start_month = 10, 09, + start_day = 06, 04, + start_hour = 00, 12, + end_year = 2016, 2019, + end_month = 10, 09, + end_day = 08, 06, + end_hour = 00, 00, + interval_seconds = 21600, + input_from_file = .true.,.true., + history_interval = 180, 60, + frames_per_outfile = 1, 1, + restart = .false., + restart_interval = 1440, + io_form_history = 2, + io_form_restart = 2, + io_form_input = 2, + io_form_boundary = 2, + iofields_filename = "vars_io.txt", "vars_io.txt", + auxhist22_outname = "wrfout_zlev_d_", + auxhist22_interval = 180, 180, + frames_per_auxhist22 = 1, 1, + io_form_auxhist22 = 2 + auxhist23_outname = "wrfout_plev_d_", + auxhist23_interval = 180, 180, + frames_per_auxhist23 = 1, 1, + io_form_auxhist23 = 2, + / + + &domains + time_step = 90, + 
time_step_fract_num = 0, + time_step_fract_den = 1, + max_dom = 1, + e_we = 91, 220, + e_sn = 100, 214, + e_vert = 45, 45, + dzstretch_s = 1.1 + p_top_requested = 5000, + num_metgrid_levels = 32, + num_metgrid_soil_levels = 4, + dx = 27000, + dy = 27000, + grid_id = 1, 2, + parent_id = 0, 1, + i_parent_start = 1, 53, + j_parent_start = 1, 25, + parent_grid_ratio = 1, 3, + parent_time_step_ratio = 1, 3, + feedback = 1, + smooth_option = 0 + / + + &physics + physics_suite = 'CONUS' + mp_physics = -1, -1, + cu_physics = -1, -1, + ra_lw_physics = -1, -1, + ra_sw_physics = -1, -1, + bl_pbl_physics = -1, -1, + sf_sfclay_physics = -1, -1, + sf_surface_physics = -1, -1, + radt = 15, 15, + bldt = 0, 0, + cudt = 0, 0, + icloud = 1, + num_land_cat = 21, + sf_urban_physics = 0, 0, + fractional_seaice = 1, + / + + &fdda + / + + &dynamics + hybrid_opt = 2, + w_damping = 0, + diff_opt = 2, 2, + km_opt = 4, 4, + diff_6th_opt = 0, 0, + diff_6th_factor = 0.12, 0.12, + base_temp = 290. + damp_opt = 3, + zdamp = 5000., 5000., + dampcoef = 0.2, 0.2, + khdif = 0, 0, + kvdif = 0, 0, + non_hydrostatic = .true., .true., + moist_adv_opt = 1, 1, + scalar_adv_opt = 1, 1, + gwd_opt = 1, 0, + / + + &bdy_control + spec_bdy_width = 5, + specified = .true. + / + + &grib2 + / + + &namelist_quilt + nio_tasks_per_group = 0, + nio_groups = 1, + / + + &diags + z_lev_diags = 1, + num_z_levels = 6, + z_levels = -80,-100,-200,-300,-400,-500 + p_lev_diags = 1, + num_press_levels = 10, + press_levels = 92500,85000,70000,50000,40000,30000,25000,20000,15000,10000 + use_tot_or_hyd_p = 1, + solar_diagnostics = 0, + / diff --git a/use_cases/Hurricane_Matthew/WRF/vars_io.txt b/use_cases/Hurricane_Matthew/WRF/vars_io.txt new file mode 100644 index 0000000..0dcb4dd --- /dev/null +++ b/use_cases/Hurricane_Matthew/WRF/vars_io.txt @@ -0,0 +1,11 @@ +-:h:0:VAR_SSO,GOT_VAR_SSO,VAR,CON,OA1,OA2,OA3,OA4,OL1,OL2,OL3,OL4,BATHYMETRY_FLAG +-:h:0:HFX_FORCE,LH_FORCE,TSK_FORCE,TSK_FORCE_TEND,HFX_FORCE_TEND,LH_FORCE_TEND +-:h:0:FNM,FNP,RDNW,RDN,DNW,DN,CFN,CFN1,THIS_IS_AN_IDEAL_RUN,RDX,RDY,RESM +-:h:0:ZETATOP,CF1,CF2,CF3,ITIMESTEP +-:h:0:MAPFAC_M,MAPFAC_U,MAPFAC_V,MAPFAC_MX,MAPFAC_MY,MAPFAC_UX,MAPFAC_UY,MAPFAC_VX,MF_VX_INV,MAPFAC_VY,MAX_MSFTX,MAX_MSFTY +-:h:0:F,E,SINALPHA,COSALPHA +-:h:0:SAVE_TOPO_FROM_REAL +-:h:0:C1H,C2H,C1F,C2F,C3H,C4H,C3F,C4F,CLAT, +-:h:0:ISEEDARR_SPPT,ISEEDARR_SKEBS,ISEEDARR_RAND_PERTURB,ISEEDARRAY_SPP_CONV,ISEEDARRAY_SPP_PBL,ISEEDARRAY_SPP_LSM +-:h:22:C1H,C2H,C1F,C2F,C3H,C4H,C3F,C4F,Q2,T2,U10,V10 +-:h:23:C1H,C2H,C1F,C2F,C3H,C4H,C3F,C4F,Q2,T2,U10,V10 diff --git a/use_cases/Land_Use_Land_Cover/METplus/.gitignore b/use_cases/Land_Use_Land_Cover/METplus/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/use_cases/Land_Use_Land_Cover/Visualization/.gitignore b/use_cases/Land_Use_Land_Cover/Visualization/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/use_cases/Land_Use_Land_Cover/WRF/.gitignore b/use_cases/Land_Use_Land_Cover/WRF/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/use_cases/Renewable_Energy/METplus/.gitignore b/use_cases/Renewable_Energy/METplus/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/use_cases/Renewable_Energy/Visualization/.gitignore b/use_cases/Renewable_Energy/Visualization/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/use_cases/Renewable_Energy/WRF/.gitignore b/use_cases/Renewable_Energy/WRF/.gitignore new file mode 100644 index 0000000..e69de29 From 62241972254359ffb0555e368c1402aeb72e39f4 Mon Sep 17 00:00:00 2001 
From: Ben Trumbore Date: Wed, 29 May 2024 16:29:01 -0400 Subject: [PATCH 4/5] Initial publication of instructions for running I-WRF on Jetstream2 (#54) Co-authored-by: George McCabe <23407799+georgemccabe@users.noreply.github.com> --- .github/workflows/documentation.yml | 8 +- .gitignore | 1 + docs/Users_Guide/matthewjetstream.rst | 270 ++++++++++++++++++++++++++ docs/Users_Guide/usecases.rst | 5 + 4 files changed, 280 insertions(+), 4 deletions(-) create mode 100644 docs/Users_Guide/matthewjetstream.rst diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index b2a40e9..d5b030f 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -17,8 +17,8 @@ jobs: name: Documentation runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 with: python-version: '3.8' - name: Install dependencies @@ -27,12 +27,12 @@ jobs: python -m pip install python-dateutil requests - name: Build Documentation run: ./.github/jobs/build_documentation.sh - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: always() with: name: i-wrf_documentation path: artifact/documentation - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: failure() with: name: documentation_warnings.log diff --git a/.gitignore b/.gitignore index b25c15b..a271a97 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ *~ +.vs diff --git a/docs/Users_Guide/matthewjetstream.rst b/docs/Users_Guide/matthewjetstream.rst new file mode 100644 index 0000000..8f0ae79 --- /dev/null +++ b/docs/Users_Guide/matthewjetstream.rst @@ -0,0 +1,270 @@ +:orphan: + +.. _matthewjetstream: + +Running I-WRF On Jetstream2 with Hurricane Matthew Data +******************************************************* + +Overview +======== + +The following instructions can be used to run +the `I-WRF weather simulation program `_ +from the `National Center for Atmospheric Research (NCAR) `_ +with data from `Hurricane Matthew `_ +on the `Jetstream2 cloud computing platform `_. +This exercise provides an introduction to using cloud computing platforms, +running computationally complex simulations and using containerized applications. + +Simulations like I-WRF often require greater computing resources +than you may have on your personal computer, +but a cloud computing platform can provided the needed computational power. +Jetstream2 is a national cyberinfrastructure resource that is easy to use +and is available to researchers and educators. +This exercise runs the I-WRF program as a Docker "container", +which simplifies the set-up work needed to run the simulation. + +It is recommended that you follow the instructions in each section in the order presented +to avoid encountering issues during the process. +Most sections refer to external documentation to provide details about the necessary steps +and to offer additional background information. + +Prepare to Use Jetstream2 +========================= + +To `get started with Jetstream2 `_, +you will need to: + +* Create an account with the `National Science Foundation (NSF) `_'s `ACCESS program `_. +* Request a computational "allocation" from ACCESS. +* Log in to Jetstream2's web portal. + +The sections below will guide you through this process. + +Create an ACCESS Account +------------------------ + +If you do not already have one, `register for an ACCESS account `_. 
+Note that you can either choose to associate your existing University/Organizational account or +create an entirely new ACCESS account when registering. + +Get an Allocation +----------------- + +With your ACCESS account set up, you may `request an allocation `_ +that will allow you to use an ACCESS-affiliated cyberinfrastructure resource. +Be sure to read all of the information on that page so that you make a suitable request. +An "Explore" project will be sufficient to work with this exercise, +and you will want to work with the resource "Indiana Jetstream2 CPU" (*not* GPU). +The typical turnaround time for allocation requests is one business day. + +Log in to the Exosphere Web Site +-------------------------------- + +Once you have an ACCESS account and allocation, +you can log in to their `Exosphere web dashboard `_. +The process of identifying your allocation and ACCESS ID to use Jetstream2 +is described on `this page `__ of the +`Introduction to Jetstream2 `_ Cornell Virtual Workshop, +and on `this page `__ +of the `Jetstream2 documentation `_. + +While adding an allocation to your account, it is recommended that you choose +the "Indiana University" region of Jetstream2 for completing this exercise. + +Create a Cloud Instance and Log In +================================== + +After you have logged in to Jetstream2 and added your allocation to your account, +you are ready to create the cloud instance where you will run the I-WRF simulation. +If you are not familiar with the cloud computing terms "image" and "instance", +it is recommended that you `read about them `__ +before proceeding. + +Create an SSH Key +----------------- + +You must upload a public SSH key to Jetstream2 before creating your instance. +Jetstream2 injects that public key into the instance's default user account, +and you will need to provide the matching private SSH key to log in to the instance. +If you are not familiar with "SSH key pairs", you should +`read about them `__ before continuing. + +* First, `create an SSH Key on your computer `_ using the "ssh-keygen" command. That command allows you to specify the name and location of the private key file it creates, with the default being "id_rsa". The matching public key file is saved to the same location and name with ".pub" appended to the filename. Later instructions will assume that your private key file is named "id_rsa", but you may choose a different name now and use that name in those later instructions. +* Then, `upload the public key to Jetstream2 `_ through the Exosphere web interface. + +Create an Instance +------------------ + +The Cornell Virtual Workshop topic `Creating an Instance `_ +provides detailed information about creating a Jetstream2 instance. +While following those steps, be sure to make the following choices for this instance: + +* When choosing an image as the instance source, if viewing "By Type", select the "Ubuntu 22.04 (latest)" image. If viewing "By Image", choose the "Featured-Ubuntu22" image. +* Choose the "Flavor" m3.quad (4 CPUs) to provide a faster simulation run-time. +* Select a custom disk size of 100 GB - large enough to hold this exercise's data and results. +* Select the SSH public key that you uploaded previously. +* You do not need to set any of the Advanced Options. + +After clicking the "Create" button, wait for the instance to enter the "Ready" state (it takes several minutes). +Note that the instance will not only be created, but will be running so that you can log in right away. 
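If it helps to see the SSH key steps above as concrete commands, a minimal sketch (run on your own computer, not on Jetstream2) might look like the following; the file name matches the ``id_rsa`` default mentioned earlier, and the ``-C`` comment is just an arbitrary label::

    # Generate an RSA key pair; accept the prompts or set a passphrase to
    # protect the private key file (~/.ssh/id_rsa).
    ssh-keygen -t rsa -f ~/.ssh/id_rsa -C "jetstream2-exercise"

    # The matching public key is what you upload through the Exosphere
    # web interface; the private key never leaves your computer.
    cat ~/.ssh/id_rsa.pub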
+ +Log in to the Instance +---------------------- + +The Exosphere web dashboard provides the easy-to-use Web Shell for accessing your Jetstream2 instances, +but after encountering some issues with this exercise when using Web Shell, +we are recommending that you use the SSH command to access your instance from a shell on your computer. +The instructions for `connecting to Jetstream2 using SSH `_ +can executed in the Command Prompt on Windows (from the Start menu, type "cmd" and select Command Prompt) +or from the Terminal application on a Mac. + +In either case you will need to know the location and name of the private SSH key created on your computer (see above), +the IP address of your instance (found in the Exosphere web dashboard) +and the default username on your instance, which is "exouser". + +Once you are logged in to the web shell you can proceed to the +"Install Software and Download Data" section below. +You will know that your login has been successful when the prompt has the form ``exouser@instance-name:~$``, +which indicates your username, the instance name, and your current working directory, followed by "$" + +Managing a Jetstream2 Instance +------------------------------ + +In order to use cloud computing resources efficiently, you must know how to +`manage your instances `_. +Instances incur costs whenever they are running (on Jetstream2, this is when they are "Ready"). +"Shelving" an instance stops it from using the cloud's CPUs and memory, +and therefore stops it from incurring any charges against your allocation. + +When you are through working on this exercise, +be sure to use the instance's "Actions" menu in the web dashboard to +"Shelve" the instance so that it is no longer spending your credits. +If you later return to the dashboard and want to use the instance again, +Use the Action menu's "Unshelve" option to start the instance up again. +Note that any programs that were running when you shelve the instance will be lost, +but the contents of the disk are preserved when shelving. + +You may also want to try the "Resize" action to change the number of CPUs of the instance. +Increasing the number of CPUs (say, to flavor "m3.8") can make your computations finish more quickly. +But of course, doubling the number of CPUs doubles the cost per hour to run the instance, +so Shelving as soon as you are done becomes even more important! + +Install Software and Download Data +================================== + +With your instance created and running and you logged in to it through a Web Shell, +you can now install the necessary software and download the data to run the simulation. +You will only need to perform these steps once, +as they essentially change the contents of the instance's disk +and those changes will remain even after the instance is shelved and unshelved. + +The following sections instruct you to issue numerous Linux commands in your web shell. +If you are not familiar with Linux, you may want to want to refer to +`An Introduction to Linux `_ when working through these steps. +The commands in each section can be copied using the button in the upper right corner +and then pasted into your web shell by right-clicking. + +If your web shell ever becomes unresponsive or disconnected from the instance, +you can recover from that situation by rebooting the instance. +In the Exosphere dashboard page for your instance, in the Actions menu, select "Reboot". +The process takes several minutes, after which the instance status will return to "Ready". 
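After a reboot, or any time your session is disconnected, you can reconnect using the SSH command described in the login section above. As a point of reference, a typical invocation looks like this; the IP address is a placeholder that must be replaced with the address shown for your instance in the Exosphere dashboard, and the key path assumes the default name used earlier::

    # Log in to the instance as the default "exouser" account with your private key.
    ssh -i ~/.ssh/id_rsa exouser@<instance-ip-address>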
+ +Install Docker and Get the I-WRF Image +-------------------------------------- + +As mentioned above, the I-WRF simulation application is provided as a Docker image that will run as a +`"container" `_ +on your cloud instance. +To run a Docker container, you must first install the Docker Engine on your instance. +You can then "pull" (download) the I-WRF image that will be run as a container. + +The `instructions for installing Docker Engine on Ubuntu `_ +are very thorough and make a good reference, but we only need to perform a subset of those steps. +These commands run a script that sets up the Docker software repository on your instance, +then installs Docker:: + + curl --location https://bit.ly/3R3lqMU > install-docker.sh + source install-docker.sh + +If a text dialog is displayed asking which services should be restarted, type ``Enter``. +When the installation is complete, you can verify that the Docker command line tool works by asking for its version:: + + docker --version + +Next, you must start the Docker daemon, which runs in the background and processes commands:: + + sudo service docker start + +If that command appeared to succeed, you can confirm its status with this command:: + + sudo systemctl --no-pager status docker + +Once all of that is in order, you must pull the latest version of the I-WRF image onto your instance:: + + docker pull ncar/iwrf + +Get the Geographic Data +----------------------- + +To run I-WRF on the Hurricane Matthew data set, you need a copy of the +geographic data representing the terrain in the area of the simulation. +These commands download an archive file containing that data, +uncompress the archive into a folder named "WPS_GEOG", and delete the archive file. +They take several minutes to complete:: + + wget https://www2.mmm.ucar.edu/wrf/src/wps_files/geog_high_res_mandatory.tar.gz + tar -xzf geog_high_res_mandatory.tar.gz + rm geog_high_res_mandatory.tar.gz + +Create the Run Folder +--------------------- + +The simulation is performed using a script that must first be downloaded. +The script expects to run in a folder where it can download data files and create result files. +The instructions in this exercise create that folder in the user's home directory and name it "matthew". +The simulation script is called "run.sh". +The following commands create the empty folder and download the script into it, +then change its permissions so it can be run:: + + mkdir matthew + curl --location https://bit.ly/3KoBtRK > matthew/run.sh + chmod 775 matthew/run.sh + +Run I-WRF +========= + +With everything in place, you are now ready to run the Docker container that will perform the simulation. +The downloaded script runs inside the container, prints lots of status information, +and creates output files in the run folder you created. +Execute this command to run the simulation in your web shell:: + + time docker run --shm-size 14G -it -v ~/:/home/wrfuser/terrestrial_data -v ~/matthew:/tmp/hurricane_matthew ncar/iwrf:latest /tmp/hurricane_matthew/run.sh + +The command has numerous arguments and options, which do the following: + +* ``time docker run`` prints the runtime of the "docker run" command. +* ``--shm-size 14G -it`` tells the command how much shared memory to use, and to run interactively in the shell. +* The ``-v`` options map folders in your cloud instance to paths within the container. +* ``ncar/iwrf:latest`` is the Docker image to use when creating the container. 
+* ``/tmp/hurricane_matthew/run.sh`` is the location within the container of the script that it runs. + +The simulation initially prints lots of information while initializing things, then settles in to the computation. +The provided configuration simulates 12 hours of weather and takes under three minutes to finish on an m3.quad Jetstream2 instance. +Once completed, you can view the end of any of the output files to confirm that it succeeded:: + + tail matthew/rsl.out.0000 + +The output should look something like this:: + + Timing for main: time 2016-10-06_11:42:30 on domain 1: 0.23300 elapsed seconds + Timing for main: time 2016-10-06_11:45:00 on domain 1: 0.23366 elapsed seconds + Timing for main: time 2016-10-06_11:47:30 on domain 1: 2.77688 elapsed seconds + Timing for main: time 2016-10-06_11:50:00 on domain 1: 0.23415 elapsed seconds + Timing for main: time 2016-10-06_11:52:30 on domain 1: 0.23260 elapsed seconds + Timing for main: time 2016-10-06_11:55:00 on domain 1: 0.23354 elapsed seconds + Timing for main: time 2016-10-06_11:57:30 on domain 1: 0.23345 elapsed seconds + Timing for main: time 2016-10-06_12:00:00 on domain 1: 0.23407 elapsed seconds + Timing for Writing wrfout_d01_2016-10-06_12:00:00 for domain 1: 0.32534 elapsed seconds + d01 2016-10-06_12:00:00 wrf: SUCCESS COMPLETE WRF + diff --git a/docs/Users_Guide/usecases.rst b/docs/Users_Guide/usecases.rst index dfee7e0..aea980d 100644 --- a/docs/Users_Guide/usecases.rst +++ b/docs/Users_Guide/usecases.rst @@ -5,6 +5,11 @@ Use Cases Generic CONUS “interesting weather” =================================== +Hurricane Matthew on Jetstream2 +=============================== + +Navigate to :ref:`matthewjetstream` for more information. + Land Use/Land Cover Change ========================== From 03f1f0b09d0bb66478c5b90837aad9da0e79bd11 Mon Sep 17 00:00:00 2001 From: George McCabe <23407799+georgemccabe@users.noreply.github.com> Date: Thu, 30 May 2024 12:13:54 -0600 Subject: [PATCH 5/5] Added sphinx copybutton (#55) * add sphinx copybutton extension * changed workflow to install packages using docs/requirements.txt to match what is used in ReadTheDocs * make copy button always visible, not just when the user hovers over the code block --- .github/workflows/documentation.yml | 3 +-- docs/_static/theme_override.css | 4 ++++ docs/conf.py | 6 +++++- docs/requirements.txt | 1 + 4 files changed, 11 insertions(+), 3 deletions(-) diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index d5b030f..eeba7bc 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -23,8 +23,7 @@ jobs: python-version: '3.8' - name: Install dependencies run: | - python -m pip install --upgrade sphinx sphinx-gallery sphinx_rtd_theme - python -m pip install python-dateutil requests + python -m pip install --upgrade -r docs/requirements.txt - name: Build Documentation run: ./.github/jobs/build_documentation.sh - uses: actions/upload-artifact@v4 diff --git a/docs/_static/theme_override.css b/docs/_static/theme_override.css index a03592c..c727aad 100644 --- a/docs/_static/theme_override.css +++ b/docs/_static/theme_override.css @@ -26,3 +26,7 @@ div[class^="highlight"] { overflow: visible !important; } } + +button.copybtn { + opacity: 1; +} diff --git a/docs/conf.py b/docs/conf.py index 1fce8f1..e0cbb83 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -32,7 +32,11 @@ # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ['sphinx.ext.autodoc','sphinx.ext.intersphinx',] +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.intersphinx', + 'sphinx_copybutton', +] # settings for ReadTheDocs PDF creation latex_engine = 'pdflatex' diff --git a/docs/requirements.txt b/docs/requirements.txt index d738588..60b1b6a 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -4,3 +4,4 @@ sphinxcontrib-bibtex sphinx-panels sphinx-rtd-theme==1.3.0 sphinx-design==0.3.0 +sphinx-copybutton
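For anyone who wants to preview the documentation changes locally with these pinned requirements rather than waiting for the workflow artifact, a build along these lines should work; note that the output directory is an arbitrary choice, and the CI job itself runs ``./.github/jobs/build_documentation.sh`` rather than calling ``sphinx-build`` directly::

    # Install the pinned documentation dependencies (sphinx, sphinx-rtd-theme,
    # sphinx-copybutton, etc.) into the current Python environment.
    python -m pip install --upgrade -r docs/requirements.txt

    # Build the HTML documentation from the docs/ source tree; the output
    # path docs/_build/html is an assumption, not necessarily what CI uses.
    sphinx-build -b html docs docs/_build/html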